feat(dependencies)!: Upgrade to google-cloud-datastore 2.x #841

Merged · 7 commits · Dec 6, 2022

Changes from all commits

2 changes: 1 addition & 1 deletion google/cloud/ndb/_cache.py
@@ -738,4 +738,4 @@ def global_cache_key(key):
Returns:
bytes: The cache key.
"""
- return _PREFIX + key.to_protobuf().SerializeToString()
+ return _PREFIX + key.to_protobuf()._pb.SerializeToString()
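
Note: in google-cloud-datastore 2.x, to_protobuf() returns a proto-plus wrapper rather than a raw protobuf message, and the wrapper does not expose SerializeToString() directly; the raw message lives on ._pb. A minimal sketch of the before/after (the Key arguments are illustrative):

    from google.cloud import datastore

    key = datastore.Key("Kind", 1337, project="example")

    # 1.x: to_protobuf() returned a raw protobuf message.
    # serialized = key.to_protobuf().SerializeToString()

    # 2.x: the raw message (and SerializeToString) hangs off ._pb.
    serialized = key.to_protobuf()._pb.SerializeToString()
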
40 changes: 20 additions & 20 deletions google/cloud/ndb/_datastore_api.py
@@ -20,8 +20,8 @@

from google.api_core import exceptions as core_exceptions
from google.cloud.datastore import helpers
- from google.cloud.datastore_v1.proto import datastore_pb2
- from google.cloud.datastore_v1.proto import entity_pb2
+ from google.cloud.datastore_v1.types import datastore as datastore_pb2
+ from google.cloud.datastore_v1.types import entity as entity_pb2

from google.cloud.ndb import context as context_module
from google.cloud.ndb import _batch
@@ -33,9 +33,9 @@
from google.cloud.ndb import tasklets
from google.cloud.ndb import utils

- EVENTUAL = datastore_pb2.ReadOptions.EVENTUAL
+ EVENTUAL = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
EVENTUAL_CONSISTENCY = EVENTUAL # Legacy NDB
- STRONG = datastore_pb2.ReadOptions.STRONG
+ STRONG = datastore_pb2.ReadOptions.ReadConsistency.STRONG

_DEFAULT_TIMEOUT = None
_NOT_FOUND = object()
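
Note: proto-plus no longer aliases enum values onto the containing message, so the nested enum type has to be named explicitly. A sketch of the 2.x spelling (1.x shown in comments):

    from google.cloud.datastore_v1.types import datastore as datastore_pb2

    # 1.x: datastore_pb2.ReadOptions.EVENTUAL
    eventual = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
    read_options = datastore_pb2.ReadOptions(read_consistency=eventual)
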
@@ -144,7 +144,7 @@ def lookup(key, options):
if not key_locked:
if result:
entity_pb = entity_pb2.Entity()
- entity_pb.MergeFromString(result)
+ entity_pb._pb.MergeFromString(result)

elif use_datastore:
lock = yield _cache.global_lock_for_read(cache_key, result)
@@ -165,7 +165,7 @@ def lookup(key, options):
if use_global_cache and not key_locked:
if entity_pb is not _NOT_FOUND:
expires = context._global_cache_timeout(key, options)
- serialized = entity_pb.SerializeToString()
+ serialized = entity_pb._pb.SerializeToString()
yield _cache.global_compare_and_swap(
cache_key, serialized, expires=expires
)
@@ -211,7 +211,7 @@ def add(self, key):
Returns:
tasklets.Future: A future for the eventual result.
"""
- todo_key = key.to_protobuf().SerializeToString()
+ todo_key = key.to_protobuf()._pb.SerializeToString()
future = tasklets.Future(info="Lookup({})".format(key))
self.todo.setdefault(todo_key, []).append(future)
return future
@@ -221,7 +221,7 @@ def idle_callback(self):
keys = []
for todo_key in self.todo.keys():
key_pb = entity_pb2.Key()
- key_pb.ParseFromString(todo_key)
+ key_pb._pb.ParseFromString(todo_key)
keys.append(key_pb)

read_options = get_read_options(self.options)
@@ -264,20 +264,20 @@ def lookup_callback(self, rpc):
if results.deferred:
next_batch = _batch.get_batch(type(self), self.options)
for key in results.deferred:
- todo_key = key.SerializeToString()
+ todo_key = key._pb.SerializeToString()
next_batch.todo.setdefault(todo_key, []).extend(self.todo[todo_key])

# For all missing keys, set result to _NOT_FOUND and let callers decide
# how to handle
for result in results.missing:
- todo_key = result.entity.key.SerializeToString()
+ todo_key = result.entity.key._pb.SerializeToString()
for future in self.todo[todo_key]:
future.set_result(_NOT_FOUND)

# For all found entities, set the result on their corresponding futures
for result in results.found:
entity = result.entity
- todo_key = entity.key.SerializeToString()
+ todo_key = entity.key._pb.SerializeToString()
for future in self.todo[todo_key]:
future.set_result(entity)

@@ -306,7 +306,7 @@ def _datastore_lookup(keys, read_options, retries=None, timeout=None):
read_options=read_options,
)

return make_call("Lookup", request, retries=retries, timeout=timeout)
return make_call("lookup", request, retries=retries, timeout=timeout)


def get_read_options(options, default_read_consistency=None):
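
Note: every RPC name passed to make_call changes from CamelCase ("Lookup", "Commit", ...) to snake_case ("lookup", "commit", ...). The 1.x grpc stub exposed CamelCase RPC methods, while the 2.x GAPIC transport exposes each RPC as a snake_case attribute. A sketch of the dispatch this implies, assuming make_call resolves the method by name (the helper below is illustrative, not the library's actual code):

    def resolve_rpc(stub_or_transport, rpc_name):
        # 1.x: resolve_rpc(stub, "Lookup"); 2.x: resolve_rpc(transport, "lookup")
        return getattr(stub_or_transport, rpc_name)
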
@@ -375,7 +375,7 @@ def put(entity, options):
lock = yield _cache.global_lock_for_write(cache_key)
else:
expires = context._global_cache_timeout(entity.key, options)
- cache_value = entity_pb.SerializeToString()
+ cache_value = entity_pb._pb.SerializeToString()
yield _cache.global_set(cache_key, cache_value, expires=expires)

if use_datastore:
@@ -725,7 +725,7 @@ def allocate_ids_callback(self, rpc, mutations, futures):
# Update mutations with complete keys
response = rpc.result()
for mutation, key, future in zip(mutations, response.keys, futures):
- mutation.upsert.key.CopyFrom(key)
+ mutation.upsert.key._pb.CopyFrom(key._pb)
future.set_result(key)

@tasklets.tasklet
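
Note: proto-plus wrappers also lack the protobuf mutation API (CopyFrom, MergeFrom), so copies have to go raw-message to raw-message, which is why both sides unwrap with ._pb. A self-contained sketch (the project id is illustrative):

    from google.cloud.datastore_v1.types import entity as entity_pb2

    src = entity_pb2.Key()
    src.partition_id.project_id = "example"

    dst = entity_pb2.Key()
    dst._pb.CopyFrom(src._pb)  # CopyFrom exists only on the raw protobufs
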
@@ -863,9 +863,9 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None):
:class:`google.cloud.datastore_v1.datastore_pb2.CommitResponse`
"""
if transaction is None:
- mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+ mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
else:
- mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+ mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL

client = context_module.get_context().client
request = datastore_pb2.CommitRequest(
@@ -875,7 +875,7 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None):
transaction=transaction,
)

return make_call("Commit", request, retries=retries, timeout=timeout)
return make_call("commit", request, retries=retries, timeout=timeout)


def allocate(keys, options):
@@ -992,7 +992,7 @@ def _datastore_allocate_ids(keys, retries=None, timeout=None):
client = context_module.get_context().client
request = datastore_pb2.AllocateIdsRequest(project_id=client.project, keys=keys)

return make_call("AllocateIds", request, retries=retries, timeout=timeout)
return make_call("allocate_ids", request, retries=retries, timeout=timeout)


@tasklets.tasklet
@@ -1048,7 +1048,7 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None):
project_id=client.project, transaction_options=options
)

return make_call("BeginTransaction", request, retries=retries, timeout=timeout)
return make_call("begin_transaction", request, retries=retries, timeout=timeout)


@tasklets.tasklet
@@ -1089,4 +1089,4 @@ def _datastore_rollback(transaction, retries=None, timeout=None):
project_id=client.project, transaction=transaction
)

return make_call("Rollback", request, retries=retries, timeout=timeout)
return make_call("rollback", request, retries=retries, timeout=timeout)
54 changes: 27 additions & 27 deletions google/cloud/ndb/_datastore_query.py
@@ -22,9 +22,9 @@

from google.cloud import environment_vars

- from google.cloud.datastore_v1.proto import datastore_pb2
- from google.cloud.datastore_v1.proto import entity_pb2
- from google.cloud.datastore_v1.proto import query_pb2
+ from google.cloud.datastore_v1.types import datastore as datastore_pb2
+ from google.cloud.datastore_v1.types import entity as entity_pb2
+ from google.cloud.datastore_v1.types import query as query_pb2
from google.cloud.datastore import helpers, Key

from google.cloud.ndb import context as context_module
@@ -38,24 +38,24 @@
log = logging.getLogger(__name__)

MoreResultsType = query_pb2.QueryResultBatch.MoreResultsType
- NO_MORE_RESULTS = MoreResultsType.Value("NO_MORE_RESULTS")
- NOT_FINISHED = MoreResultsType.Value("NOT_FINISHED")
- MORE_RESULTS_AFTER_LIMIT = MoreResultsType.Value("MORE_RESULTS_AFTER_LIMIT")
+ NO_MORE_RESULTS = MoreResultsType.NO_MORE_RESULTS
+ NOT_FINISHED = MoreResultsType.NOT_FINISHED
+ MORE_RESULTS_AFTER_LIMIT = MoreResultsType.MORE_RESULTS_AFTER_LIMIT

ResultType = query_pb2.EntityResult.ResultType
- RESULT_TYPE_FULL = ResultType.Value("FULL")
- RESULT_TYPE_KEY_ONLY = ResultType.Value("KEY_ONLY")
- RESULT_TYPE_PROJECTION = ResultType.Value("PROJECTION")
+ RESULT_TYPE_FULL = ResultType.FULL
+ RESULT_TYPE_KEY_ONLY = ResultType.KEY_ONLY
+ RESULT_TYPE_PROJECTION = ResultType.PROJECTION

- DOWN = query_pb2.PropertyOrder.DESCENDING
- UP = query_pb2.PropertyOrder.ASCENDING
+ DOWN = query_pb2.PropertyOrder.Direction.DESCENDING
+ UP = query_pb2.PropertyOrder.Direction.ASCENDING

FILTER_OPERATORS = {
"=": query_pb2.PropertyFilter.EQUAL,
"<": query_pb2.PropertyFilter.LESS_THAN,
"<=": query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
">": query_pb2.PropertyFilter.GREATER_THAN,
">=": query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
"=": query_pb2.PropertyFilter.Operator.EQUAL,
"<": query_pb2.PropertyFilter.Operator.LESS_THAN,
"<=": query_pb2.PropertyFilter.Operator.LESS_THAN_OR_EQUAL,
">": query_pb2.PropertyFilter.Operator.GREATER_THAN,
">=": query_pb2.PropertyFilter.Operator.GREATER_THAN_OR_EQUAL,
}

_KEY_NOT_IN_CACHE = object()
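
Note: 1.x enums were protobuf EnumTypeWrapper objects whose values were looked up with .Value("NAME"); proto-plus enums are ordinary enum.IntEnum subclasses, so attribute access (or name indexing) replaces those calls. A short sketch:

    from google.cloud.datastore_v1.types import query as query_pb2

    MoreResultsType = query_pb2.QueryResultBatch.MoreResultsType
    # 1.x: MoreResultsType.Value("NOT_FINISHED")
    not_finished = MoreResultsType.NOT_FINISHED
    assert not_finished == MoreResultsType["NOT_FINISHED"]  # IntEnum name lookup
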
@@ -77,7 +77,7 @@ def make_filter(name, op, value):
property=query_pb2.PropertyReference(name=name),
op=FILTER_OPERATORS[op],
)
- helpers._set_protobuf_value(filter_pb.value, value)
+ helpers._set_protobuf_value(filter_pb.value._pb, value)
return filter_pb
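
Note: helpers such as _set_protobuf_value are still written against raw protobuf Value messages, so the proto-plus field is unwrapped with ._pb at the call site. A sketch with a hypothetical property name and value:

    from google.cloud.datastore import helpers
    from google.cloud.datastore_v1.types import query as query_pb2

    filter_pb = query_pb2.PropertyFilter(
        property=query_pb2.PropertyReference(name="age"),  # hypothetical
        op=query_pb2.PropertyFilter.Operator.GREATER_THAN,
    )
    helpers._set_protobuf_value(filter_pb.value._pb, 21)  # raw Value message
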


@@ -92,7 +92,7 @@ def make_composite_and_filter(filter_pbs):
query_pb2.CompositeFilter: The new composite filter.
"""
return query_pb2.CompositeFilter(
- op=query_pb2.CompositeFilter.AND,
+ op=query_pb2.CompositeFilter.Operator.AND,
filters=[_filter_pb(filter_pb) for filter_pb in filter_pbs],
)

@@ -683,7 +683,7 @@ def has_next_async(self):
next_result = result_sets[0].next()

# Check to see if it's a duplicate
- hash_key = next_result.result_pb.entity.key.SerializeToString()
+ hash_key = next_result.result_pb.entity.key._pb.SerializeToString()
if hash_key in self._seen_keys:
continue

@@ -811,9 +811,9 @@ def _compare(self, other):
).flat_path
else:
this_value_pb = self.result_pb.entity.properties[order.name]
- this_value = helpers._get_value_from_value_pb(this_value_pb)
+ this_value = helpers._get_value_from_value_pb(this_value_pb._pb)
other_value_pb = other.result_pb.entity.properties[order.name]
- other_value = helpers._get_value_from_value_pb(other_value_pb)
+ other_value = helpers._get_value_from_value_pb(other_value_pb._pb)

# Compare key paths if ordering by key property
if isinstance(this_value, Key):
@@ -935,19 +935,19 @@ def _query_to_protobuf(query):
ancestor_pb = query.ancestor._key.to_protobuf()
ancestor_filter_pb = query_pb2.PropertyFilter(
property=query_pb2.PropertyReference(name="__key__"),
- op=query_pb2.PropertyFilter.HAS_ANCESTOR,
+ op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR,
)
- ancestor_filter_pb.value.key_value.CopyFrom(ancestor_pb)
+ ancestor_filter_pb.value.key_value._pb.CopyFrom(ancestor_pb._pb)

if filter_pb is None:
filter_pb = ancestor_filter_pb

elif isinstance(filter_pb, query_pb2.CompositeFilter):
- filter_pb.filters.add(property_filter=ancestor_filter_pb)
+ filter_pb.filters._pb.add(property_filter=ancestor_filter_pb._pb)

else:
filter_pb = query_pb2.CompositeFilter(
- op=query_pb2.CompositeFilter.AND,
+ op=query_pb2.CompositeFilter.Operator.AND,
filters=[
_filter_pb(filter_pb),
_filter_pb(ancestor_filter_pb),
@@ -969,7 +969,7 @@ def _query_to_protobuf(query):
query_pb.offset = query.offset

if query.limit:
- query_pb.limit.value = query.limit
+ query_pb._pb.limit.value = query.limit

return query_pb
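
Note: Query.limit is a google.protobuf.Int32Value wrapper field, and the raw-protobuf route sets the wrapped integer explicitly. Sketch (the limit value is illustrative):

    from google.cloud.datastore_v1.types import query as query_pb2

    query_pb = query_pb2.Query()
    query_pb._pb.limit.value = 20  # set the wrapped int32 on the raw message
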

@@ -1016,7 +1016,7 @@ def _datastore_run_query(query):
read_options=read_options,
)
response = yield _datastore_api.make_call(
"RunQuery", request, timeout=query.timeout
"run_query", request, timeout=query.timeout
)
utils.logging_debug(log, response)
raise tasklets.Return(response)
20 changes: 12 additions & 8 deletions google/cloud/ndb/client.py
@@ -19,12 +19,13 @@
import os
import requests

- from google.api_core import client_info
+ from google.api_core.gapic_v1 import client_info
from google.cloud import environment_vars
from google.cloud import _helpers
from google.cloud import client as google_client
- from google.cloud.datastore_v1.gapic import datastore_client
- from google.cloud.datastore_v1.proto import datastore_pb2_grpc
+ from google.cloud.datastore_v1.services.datastore.transports import (
+     grpc as datastore_grpc,
+ )

from google.cloud.ndb import __version__
from google.cloud.ndb import context as context_module
@@ -35,7 +36,7 @@
user_agent="google-cloud-ndb/{}".format(__version__)
)

- DATASTORE_API_HOST = datastore_client.DatastoreClient.SERVICE_ADDRESS.rsplit(":", 1)[0]
+ DATASTORE_API_HOST = "datastore.googleapis.com"


def _get_gcd_project():
@@ -114,14 +115,17 @@ def __init__(self, project=None, namespace=None, credentials=None):

if emulator:
channel = grpc.insecure_channel(self.host)

else:
- user_agent = _CLIENT_INFO.to_user_agent()
+ user_agent = self.client_info.to_user_agent()
channel = _helpers.make_secure_channel(
self._credentials, user_agent, self.host
)

- self.stub = datastore_pb2_grpc.DatastoreStub(channel)
+ self.stub = datastore_grpc.DatastoreGrpcTransport(
+     host=self.host,
+     credentials=credentials,
+     client_info=self.client_info,
+     channel=channel,
+ )

@contextlib.contextmanager
def context(
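
Note: the 1.x DatastoreStub is replaced by the GAPIC transport class, which wraps the same gRPC channel and also carries client_info metadata. A minimal sketch against an emulator-style endpoint (the localhost address is illustrative; passing a pre-built channel means no credentials are required):

    import grpc
    from google.cloud.datastore_v1.services.datastore.transports import (
        grpc as datastore_grpc,
    )

    channel = grpc.insecure_channel("localhost:8081")
    transport = datastore_grpc.DatastoreGrpcTransport(
        host="localhost:8081",
        channel=channel,  # an explicit channel takes precedence over host
    )
    lookup = transport.lookup  # RPCs are exposed as snake_case attributes
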
6 changes: 3 additions & 3 deletions google/cloud/ndb/key.py
@@ -204,7 +204,7 @@ class Key(object):
>>> reference
app: "example"
path {
- Element {
+ element {
type: "Kind"
id: 1337
}
@@ -681,13 +681,13 @@ def reference(self):
>>> key = ndb.Key("Trampoline", 88, project="xy", namespace="zt")
>>> key.reference()
app: "xy"
name_space: "zt"
path {
- Element {
+ element {
type: "Trampoline"
id: 88
}
}
name_space: "zt"
<BLANKLINE>
"""
if self._reference is None:
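
Note: the doctest output changes here appear to come from the newer protobuf runtime pulled in by the 2.x stack: the path element is now printed with its lowercase field name (element rather than Element), and fields are emitted in field-number order, which moves name_space below path.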