Skip to content
2 changes: 1 addition & 1 deletion packages/google-cloud-ndb/google/cloud/ndb/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

from google.cloud.ndb import version

__version__ = version.__version__
__version__: str = version.__version__

from google.cloud.ndb.client import Client
from google.cloud.ndb.context import AutoBatcher
Expand Down
23 changes: 11 additions & 12 deletions packages/google-cloud-ndb/google/cloud/ndb/_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,18 +117,18 @@ def done_callback(self, cache_call):
"""
exception = cache_call.exception()
if exception:
for future in self.futures:
for future in self.futures: # type: ignore[attr-defined]
future.set_exception(exception)

else:
for future in self.futures:
for future in self.futures: # type: ignore[attr-defined]
future.set_result(None)

def make_call(self):
    """Make the actual call to the global cache. To be overridden.

    Abstract hook: concrete batch subclasses override this to issue the
    real ``GlobalCache`` operation for their accumulated keys/values.

    Raises:
        NotImplementedError: Always, on this base class.
    """
    raise NotImplementedError

def future_info(self, key, value=None):
    """Generate info string for Future. To be overridden.

    The pasted diff left both the old ``(self, key)`` and new
    ``(self, key, value=None)`` signatures in place; the new one is kept
    so read-style and write-style subclasses share a single hook.

    Args:
        key: Cache key the info string describes.
        value: Optional value associated with ``key`` (used by
            write-style operations; ignored by read-style ones).

    Raises:
        NotImplementedError: Always, on this base class.
    """
    raise NotImplementedError

Expand Down Expand Up @@ -279,7 +279,7 @@ def make_call(self):
"""Call :method:`GlobalCache.get`."""
return _global_cache().get(self.keys)

def future_info(self, key, value=None):
    """Generate info string for Future describing a ``GlobalCache.get``.

    Args:
        key: Cache key being read.
        value: Unused; accepted for signature compatibility with the
            shared base-class hook.

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.get({})".format(key)

Expand Down Expand Up @@ -373,7 +373,7 @@ def make_call(self):
"""Call :method:`GlobalCache.set`."""
return _global_cache().set(self.todo, expires=self.expires)

def future_info(self, key, value=None):
    """Generate info string for Future describing a ``GlobalCache.set``.

    Args:
        key: Cache key being written.
        value: Value being written (default ``None`` keeps the signature
            compatible with the shared base-class hook).

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.set({}, {})".format(key, value)

Expand Down Expand Up @@ -436,7 +436,7 @@ def make_call(self):
"""Call :method:`GlobalCache.set`."""
return _global_cache().set_if_not_exists(self.todo, expires=self.expires)

def future_info(self, key, value=None):
    """Generate info string for Future describing a
    ``GlobalCache.set_if_not_exists``.

    Args:
        key: Cache key being conditionally written.
        value: Value being written (default ``None`` keeps the signature
            compatible with the shared base-class hook).

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.set_if_not_exists({}, {})".format(key, value)

Expand Down Expand Up @@ -482,7 +482,7 @@ def make_call(self):
"""Call :method:`GlobalCache.delete`."""
return _global_cache().delete(self.keys)

def future_info(self, key, value=None):
    """Generate info string for Future describing a ``GlobalCache.delete``.

    Args:
        key: Cache key being deleted.
        value: Unused; accepted for signature compatibility with the
            shared base-class hook.

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.delete({})".format(key)

Expand Down Expand Up @@ -513,7 +513,7 @@ def make_call(self):
"""Call :method:`GlobalCache.watch`."""
return _global_cache().watch(self.todo)

def future_info(self, key, value=None):
    """Generate info string for Future describing a ``GlobalCache.watch``.

    Args:
        key: Cache key being watched.
        value: Value associated with the watch (default ``None`` keeps
            the signature compatible with the shared base-class hook).

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.watch({}, {})".format(key, value)

Expand Down Expand Up @@ -543,7 +543,7 @@ def make_call(self):
"""Call :method:`GlobalCache.unwatch`."""
return _global_cache().unwatch(self.keys)

def future_info(self, key, value=None):
    """Generate info string for Future describing a ``GlobalCache.unwatch``.

    Args:
        key: Cache key being unwatched.
        value: Unused; accepted for signature compatibility with the
            shared base-class hook.

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.unwatch({})".format(key)

Expand Down Expand Up @@ -580,7 +580,7 @@ def make_call(self):
"""Call :method:`GlobalCache.compare_and_swap`."""
return _global_cache().compare_and_swap(self.todo, expires=self.expires)

def future_info(self, key, value=None):
    """Generate info string for Future describing a
    ``GlobalCache.compare_and_swap``.

    Args:
        key: Cache key being conditionally swapped.
        value: New value for the swap (default ``None`` keeps the
            signature compatible with the shared base-class hook).

    Returns:
        str: Human-readable description of the pending call.
    """
    return "GlobalCache.compare_and_swap({}, {})".format(key, value)

Expand Down Expand Up @@ -627,8 +627,7 @@ def global_lock_for_write(key):
tasklets.Future: Eventual result will be a lock value to be used later with
:func:`global_unlock`.
"""
lock = "." + str(uuid.uuid4())
lock = lock.encode("ascii")
lock = ("." + str(uuid.uuid4())).encode("ascii")
utils.logging_debug(log, "lock for write: {}", lock)

def new_value(old_value):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def lookup(key, options):
if use_global_cache and not key_locked:
if entity_pb is not _NOT_FOUND:
expires = context._global_cache_timeout(key, options)
serialized = entity_pb._pb.SerializeToString()
serialized = entity_pb._pb.SerializeToString() # type: ignore[attr-defined]
yield _cache.global_compare_and_swap(
cache_key, serialized, expires=expires
)
Expand Down
20 changes: 17 additions & 3 deletions packages/google-cloud-ndb/google/cloud/ndb/_datastore_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,10 @@ def has_next_async(self):
if self._batch is None:
yield self._next_batch() # First time

if self._batch is None:
raise TypeError("self._batch cannot be None")
if self._index is None:
raise TypeError("self._index cannot be None")
if self._index < len(self._batch):
raise tasklets.Return(True)

def probably_has_next(self):
    """Cheaply guess whether more results remain, without a network call.

    The pasted diff kept both the old unguarded comparison and the new
    ``None``-guarded one; the guarded form is kept so an uninitialized
    ``_index`` cannot raise ``TypeError``.

    Returns:
        bool: :data:`True` if another result is likely available.
    """
    return (
        self._batch is None  # Haven't even started yet
        or self._has_next_batch  # There's another batch to fetch
        # Not done with current batch (guard against _index still None)
        or (self._index is not None and self._index < len(self._batch))
    )

@tasklets.tasklet
Expand Down Expand Up @@ -421,6 +427,10 @@ def next(self):
self._cursor_before = None
raise StopIteration

if self._batch is None:
raise TypeError("self._batch cannot be None")
if self._index is None:
raise TypeError("self._index cannot be None")
# Won't block
next_result = self._batch[self._index]
self._index += 1
Expand All @@ -446,7 +456,7 @@ def _peek(self):
batch = self._batch
index = self._index

if batch and index < len(batch):
if batch and index is not None and index < len(batch):
return batch[index]

raise KeyError(index)
Expand Down Expand Up @@ -554,6 +564,8 @@ def next(self):
if not self.has_next():
raise StopIteration()

if self._next_result is None:
raise TypeError("self._next_result cannot be None")
# Won't block
next_result = self._next_result
self._next_result = None
Expand Down Expand Up @@ -718,6 +730,8 @@ def next(self):
if not self.has_next():
raise StopIteration()

if self._next_result is None:
raise TypeError("self._next_result cannot be None")
# Won't block
next_result = self._next_result
self._next_result = None
Expand Down Expand Up @@ -949,7 +963,7 @@ def _query_to_protobuf(query):
filter_pb = ancestor_filter_pb

elif isinstance(filter_pb, query_pb2.CompositeFilter):
filter_pb.filters._pb.add(property_filter=ancestor_filter_pb._pb)
filter_pb.filters._pb.add(property_filter=ancestor_filter_pb._pb) # type: ignore[attr-defined]

else:
filter_pb = query_pb2.CompositeFilter(
Expand Down
11 changes: 7 additions & 4 deletions packages/google-cloud-ndb/google/cloud/ndb/_eventloop.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,12 +116,15 @@ class EventLoop(object):
"""

def __init__(self):
    # All mutable state is created in _init so that clear() can reset the
    # loop without calling __init__ directly (which is fragile under
    # subclassing).
    self._init()

def _init(self):
    """Create (or re-create) the event loop's bookkeeping state."""
    self.current: collections.deque = collections.deque()
    self.idlers: collections.deque = collections.deque()
    self.inactive = 0
    self.queue = []
    self.rpcs = {}
    self.rpc_results: queue.Queue = queue.Queue()

def clear(self):
"""Remove all pending events without running any."""
Expand All @@ -139,7 +142,7 @@ def clear(self):
utils.logging_debug(log, " queue = {}", queue)
if rpcs:
utils.logging_debug(log, " rpcs = {}", rpcs)
self.__init__()
self._init()
current.clear()
idlers.clear()
queue[:] = []
Expand Down
29 changes: 21 additions & 8 deletions packages/google-cloud-ndb/google/cloud/ndb/_gql.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import datetime
import re
import time
from typing import Any

from google.cloud.ndb import context as context_module
from google.cloud.ndb import exceptions
Expand Down Expand Up @@ -408,7 +409,8 @@ def _AddProcessedParameterFilter(self, identifier, condition, operator, paramete
if identifier.lower() == "ancestor":
self._has_ancestor = True
filter_rule = (self._ANCESTOR, "is")
assert condition.lower() == "is"
if condition.lower() != "is":
raise ValueError("condition must be 'is'")

if operator == "list" and condition.lower() not in ["in", "not_in"]:
self._Error("Only IN can process a list of values, given '%s'" % condition)
Expand Down Expand Up @@ -485,7 +487,7 @@ def _Literal(self):
a string, integer, floating point value, boolean or None).
"""

literal = None
literal: Any = None

if self._next_symbol < len(self._symbols):
try:
Expand Down Expand Up @@ -770,27 +772,38 @@ def _raise_cast_error(message):


def _time_function(values):
    """Convert GQL ``TIME(...)`` arguments into a :class:`datetime.time`.

    Accepted forms (mirroring the original dispatch):
      * one ``str`` argument parsed as ``"%H:%M:%S"``;
      * one ``int`` argument, treated as the hour;
      * two or three positional ints ``(hour, minute[, second])``.

    Args:
        values (list): The literal arguments given to ``time()``.

    Returns:
        datetime.time: The parsed time value.

    Raises:
        Whatever :func:`_raise_cast_error` raises, for any malformed input.
    """
    if len(values) == 1:
        value = values[0]
        if isinstance(value, str):
            try:
                parsed_time = time.strptime(value, "%H:%M:%S")
            except ValueError as error:
                _raise_cast_error(
                    "Error during time conversion, {}, {}".format(error, values)
                )
            # Keep only the hour/minute/second fields of the struct_time.
            t_tuple = (
                parsed_time.tm_hour,
                parsed_time.tm_min,
                parsed_time.tm_sec,
            )
        elif isinstance(value, int):
            t_tuple = (value,)
        else:
            _raise_cast_error("Invalid argument for time(), {}".format(value))
    elif len(values) < 4:
        t_tuple = tuple(values)
    else:
        _raise_cast_error("Too many arguments for time(), {}".format(values))
    try:
        # An empty argument list reaches here as an empty tuple; reject it
        # explicitly rather than silently producing midnight.
        if not t_tuple:
            _raise_cast_error("Invalid arguments for time()")
        return datetime.time(*t_tuple)
    except ValueError as error:
        _raise_cast_error("Error during time conversion, {}, {}".format(error, values))

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -389,11 +389,10 @@ class Property(ProtocolBuffer.ProtocolMessage):
24: "EMPTY_LIST",
}

@classmethod
def Meaning_Name(cls, x):
    """Return the symbolic name for meaning code ``x``, or ``""`` if unknown.

    The pasted diff kept the legacy ``Meaning_Name = classmethod(...)``
    rebinding alongside the new ``@classmethod`` decorator; only the
    decorator form is needed.
    """
    return cls._Meaning_NAMES.get(x, "")

has_meaning_ = 0
meaning_ = 0
has_meaning_uri_ = 0
Expand Down Expand Up @@ -526,7 +525,7 @@ class Path_Element(ProtocolBuffer.ProtocolMessage):
def type(self):
    """Return the element type, decoding legacy byte-strings to ``str``.

    The pasted diff kept duplicate return lines; a single decode path is
    kept. Uses an exact ``type(...) is bytes`` check (not isinstance) to
    match the original legacy-protobuf behavior.
    """
    # Force legacy byte-str to be a str.
    if type(self.type_) is bytes:
        return self.type_.decode()
    return self.type_

def set_type(self, x):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,9 @@ def MergePartialFromString(self, s):
d = Decoder(a, 0, len(a))
self.TryMerge(d)

def TryMerge(self, d):
    """Merge fields decoded from ``d`` into this message.

    Abstract hook invoked by ``MergePartialFromString``; concrete
    message subclasses must override it.

    Args:
        d: A ``Decoder`` positioned over the serialized bytes.

    Raises:
        NotImplementedError: Always, on this base class.
    """
    raise NotImplementedError


class Decoder:
NUMERIC = 0
Expand Down
5 changes: 5 additions & 0 deletions packages/google-cloud-ndb/google/cloud/ndb/_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,16 @@
import logging

from google.cloud.ndb import exceptions
from typing import Any

log = logging.getLogger(__name__)


class Options(object):
max_memcache_items: Any
force_writes: Any
propagation: Any

__slots__ = (
# Supported
"retries",
Expand Down
2 changes: 1 addition & 1 deletion packages/google-cloud-ndb/google/cloud/ndb/_remote.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def __init__(self, future, info):
self.future = future
self.info = info
self.start_time = time.time()
self.elapsed_time = 0
self.elapsed_time = 0.0

def record_time(future):
self.elapsed_time = time.time() - self.start_time
Expand Down
6 changes: 3 additions & 3 deletions packages/google-cloud-ndb/google/cloud/ndb/_retry.py
Original file line number Diff line number Diff line change
def wraps_safely(obj, attr_names=functools.WRAPPER_ASSIGNMENTS):
    """Like :func:`functools.wraps`, but tolerant of missing attributes.

    Some callables (e.g. :func:`functools.partial` objects) lack standard
    metadata attributes such as ``__name__``; blindly copying those with
    ``functools.wraps`` raises :class:`AttributeError`. This wrapper copies
    only the attributes ``obj`` actually has.

    Args:
        obj: The wrapped callable whose metadata should be copied.
        attr_names: Candidate attribute names to copy.

    Returns:
        A decorator, as returned by :func:`functools.wraps`.
    """
    # Materialize the filtered names as a tuple: the pasted diff kept both
    # the old bare-generator form and this tuple form; a tuple is safe to
    # iterate more than once.
    return functools.wraps(
        obj, assigned=tuple(name for name in attr_names if hasattr(obj, name))
    )


Expand Down Expand Up @@ -84,7 +84,7 @@ def retry_wrapper(*args, **kwargs):
error = e
except BaseException as e:
# `e` is removed from locals at end of block
error = e # See: https://goo.gl/5J8BMK
error = e # type: ignore[assignment] # See: https://goo.gl/5J8BMK

if not is_transient_error(error):
# If we are in an inner retry block, use special nested
Expand All @@ -107,7 +107,7 @@ def retry_wrapper(*args, **kwargs):

# Unknown errors really want to show up as None, so manually set the error.
if isinstance(error, core_exceptions.Unknown):
error = "google.api_core.exceptions.Unknown"
error = "google.api_core.exceptions.Unknown" # type: ignore[assignment]

raise core_exceptions.RetryError(
"Maximum number of {} retries exceeded while calling {}".format(
Expand Down
4 changes: 2 additions & 2 deletions packages/google-cloud-ndb/google/cloud/ndb/_transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,8 +257,8 @@ def _transaction_async(context, callback, read_only=False):
transaction_id = yield _datastore_api.begin_transaction(read_only, retries=0)
utils.logging_debug(log, "Transaction Id: {}", transaction_id)

on_commit_callbacks = []
transaction_complete_callbacks = []
on_commit_callbacks: list = []
transaction_complete_callbacks: list = []
tx_context = context.new(
transaction=transaction_id,
on_commit_callbacks=on_commit_callbacks,
Expand Down
Loading
Loading