Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

First pass at Python2.7 compatibility #203

Merged
merged 15 commits into the base branch from the contributor's branch
Oct 31, 2019
Merged
1 change: 1 addition & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ show_missing = True
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
pragma: NO PY${PY_VERSION} COVER
omit =
*/gapic/*.py
*/proto/*.py
4 changes: 3 additions & 1 deletion google/cloud/ndb/_batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

"""Support for batching operations."""

from google.cloud.ndb import context as context_module
from google.cloud.ndb import _eventloop


Expand All @@ -35,6 +34,9 @@ def get_batch(batch_cls, options=None):
Returns:
batch_cls: An instance of the batch class.
"""
# prevent circular import in Python 2.7
from google.cloud.ndb import context as context_module

context = context_module.get_context()
batches = context.batches.get(batch_cls)
if batches is None:
Expand Down
12 changes: 9 additions & 3 deletions google/cloud/ndb/_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,19 +12,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import itertools

from google.cloud.ndb import _batch
from google.cloud.ndb import context as context_module
from google.cloud.ndb import tasklets

# For Python 2.7 Compatibility
try:
from collections import UserDict
except ImportError: # pragma: NO PY3 COVER
from UserDict import UserDict


_LOCKED = b"0"
_LOCK_TIME = 32
_PREFIX = b"NDB30"


class ContextCache(collections.UserDict):
class ContextCache(UserDict):
"""A per-context in-memory entity cache.

This cache verifies the fetched entity has the correct key before
Expand Down Expand Up @@ -55,7 +61,7 @@ def _future_result(result):
return future


class _GlobalCacheBatch:
class _GlobalCacheBatch(object):
"""Abstract base for classes used to batch operations for the global cache.
"""

Expand Down
6 changes: 3 additions & 3 deletions google/cloud/ndb/_datastore_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def lookup(key, options):
raise tasklets.Return(entity_pb)


class _LookupBatch:
class _LookupBatch(object):
"""Batch for Lookup requests.

Attributes:
Expand Down Expand Up @@ -456,7 +456,7 @@ def delete(key, options):
yield _cache.global_delete(cache_key)


class _NonTransactionalCommitBatch:
class _NonTransactionalCommitBatch(object):
"""Batch for tracking a set of mutations for a non-transactional commit.

Attributes:
Expand Down Expand Up @@ -858,7 +858,7 @@ def allocate(keys, options):
return batch.add(keys)


class _AllocateIdsBatch:
class _AllocateIdsBatch(object):
"""Batch for AllocateIds requests.

Not related to batch used by transactions to allocate ids for upserts
Expand Down
13 changes: 9 additions & 4 deletions google/cloud/ndb/_datastore_query.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
Expand Down Expand Up @@ -129,7 +130,7 @@ def iterate(query, raw=False):
return _QueryIteratorImpl(query, raw=raw)


class QueryIterator:
class QueryIterator(object):
"""An iterator for query results.

Executes the given query and provides an interface for iterating over
Expand Down Expand Up @@ -502,7 +503,7 @@ def __init__(self, query, raw=False):
query.copy(filters=node, offset=None, limit=None)
for node in query.filters._nodes
]
self._result_sets = [iterate(query, raw=True) for query in queries]
self._result_sets = [iterate(_query, raw=True) for _query in queries]
self._sortable = bool(query.order_by)
self._seen_keys = set()
self._next_result = None
Expand Down Expand Up @@ -616,7 +617,7 @@ def cursor_after(self):


@functools.total_ordering
class _Result:
class _Result(object):
"""A single, sortable query result.

Args:
Expand Down Expand Up @@ -645,6 +646,10 @@ def __eq__(self, other):

return self._compare(other) == 0

def __ne__(self, other):
    """For total ordering. Python 2.7 only.

    Python 2 does not derive ``!=`` from ``__eq__``, and the Python 2.7
    version of :func:`functools.total_ordering` (applied to this class)
    does not fill in ``__ne__``, so it is defined explicitly in terms of
    the same comparison helper used by ``__eq__``.

    Args:
        other: The other result to compare against.

    Returns:
        bool: :data:`True` if the two results compare unequal.
    """
    return self._compare(other) != 0

def _compare(self, other):
"""Compare this result to another result for sorting.

Expand Down Expand Up @@ -847,7 +852,7 @@ def _datastore_run_query(query):
raise tasklets.Return(response)


class Cursor:
class Cursor(object):
"""Cursor.

A pointer to a place in a sequence of query results. Cursor itself is just
Expand Down
7 changes: 5 additions & 2 deletions google/cloud/ndb/_datastore_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@


@functools.total_ordering
class BlobKey:
class BlobKey(object):
"""Key used to identify a blob in the blobstore.

.. note::
Expand Down Expand Up @@ -78,11 +78,14 @@ def __eq__(self, other):

def __lt__(self, other):
    """Order this blob key relative to another value.

    Together with ``__eq__``, this supplies the basis from which
    :func:`functools.total_ordering` derives the remaining rich
    comparisons.

    Args:
        other (Union[BlobKey, bytes]): The value to compare against.

    Returns:
        bool: Whether this key's underlying bytes sort before ``other``.

    Raises:
        TypeError: If ``other`` is a :class:`BlobKey` wrapping
            :data:`None`, or is neither a :class:`BlobKey` nor
            :class:`bytes`. (Raising explicitly matches Python 3
            semantics; Python 2.7 would otherwise fall back to its
            arbitrary default ordering instead of raising.)
    """
    if isinstance(other, BlobKey):
        # Python 2.7 does not raise an error when other is None.
        if other._blob_key is None:
            raise TypeError
        return self._blob_key < other._blob_key
    elif isinstance(other, bytes):
        return self._blob_key < other
    else:
        # Unsupported type: raise rather than return NotImplemented so
        # the error is consistent across Python 2.7 and Python 3. The
        # original diff left an unreachable ``return NotImplemented``
        # above this raise; the dead statement is removed here.
        raise TypeError

def __hash__(self):
    """Hash the wrapped blob key value.

    Returns:
        int: The hash of the underlying ``_blob_key`` value.
    """
    return hash(self._blob_key)
12 changes: 9 additions & 3 deletions google/cloud/ndb/_eventloop.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,14 @@
This should handle both asynchronous ``ndb`` objects and arbitrary callbacks.
"""
import collections
import queue
import uuid
import time

from google.cloud.ndb import context as context_module
# Python 2.7 module name change
try:
import queue
except ImportError: # pragma: NO PY3 COVER
import Queue as queue

__all__ = [
"add_idle",
Expand All @@ -47,7 +50,7 @@ def _logging_debug(*args, **kw):
)


class EventLoop:
class EventLoop(object):
"""An event loop.

Instances of ``EventLoop`` are used to coordinate single threaded execution
Expand Down Expand Up @@ -365,6 +368,9 @@ def get_event_loop():
Returns:
EventLoop: The event loop for the current context.
"""
# Prevent circular import in Python 2.7
from google.cloud.ndb import context as context_module

context = context_module.get_context()
return context.eventloop

Expand Down
37 changes: 21 additions & 16 deletions google/cloud/ndb/_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
"""Support for options."""

import functools
import inspect
import itertools
import logging

Expand All @@ -24,7 +23,7 @@
log = logging.getLogger(__name__)


class Options:
class Options(object):
__slots__ = (
# Supported
"retries",
Expand All @@ -37,19 +36,19 @@ class Options:
"force_writes",
"max_memcache_items",
"propagation",
"deadline",
"use_memcache",
"memcache_timeout",
)

@classmethod
def options(cls, wrapped):
# If there are any positional arguments, get their names
slots = set(cls.slots())
signature = inspect.signature(wrapped)
positional = [
name
for name, parameter in signature.parameters.items()
if parameter.kind
in (parameter.POSITIONAL_ONLY, parameter.POSITIONAL_OR_KEYWORD)
]
# If there are any positional arguments, get their names.
# inspect.signature is not available in Python 2.7, so we use the
# arguments obtained with inspect.getargspec, which come from the
# positional decorator used with all query_options decorated methods.
positional = getattr(wrapped, "_positional_names", [])

# We need for any non-option arguments to come before any option
# arguments
Expand Down Expand Up @@ -84,11 +83,10 @@ def wrapper(*args, **kwargs):

# If another function that uses options is delegating to this one,
# we'll already have options.
_options = kwargs.pop("_options", None)
if not _options:
_options = cls(**kw_options)
if "_options" not in kwargs:
kwargs["_options"] = cls(**kw_options)

return wrapped(*pass_args, _options=_options, **kwargs)
return wrapped(*pass_args, **kwargs)

return wrapper

Expand All @@ -97,7 +95,7 @@ def slots(cls):
return itertools.chain(
*(
ancestor.__slots__
for ancestor in cls.mro()
for ancestor in cls.__mro__
if hasattr(ancestor, "__slots__")
)
)
Expand Down Expand Up @@ -172,6 +170,13 @@ def __eq__(self, other):

return True

def __ne__(self, other):
    """Inequality, defined as the inverse of :meth:`__eq__`.

    Required for Python 2.7 compatibility: Python 2 does not derive
    ``!=`` from ``__eq__`` automatically.

    Args:
        other: The value to compare against.

    Returns:
        The boolean inverse of ``self.__eq__(other)``, or
        :data:`NotImplemented` when ``__eq__`` cannot compare the
        operands.
    """
    result = self.__eq__(other)
    if result is NotImplemented:
        # Propagate NotImplemented so Python can try the reflected
        # comparison on ``other``, rather than forcing a "not equal"
        # answer for types ``__eq__`` declined to compare.
        return result
    return not result

def __repr__(self):
options = ", ".join(
[
Expand All @@ -191,7 +196,7 @@ def items(self):


class ReadOptions(Options):
__slots__ = ("read_consistency", "transaction")
__slots__ = ("read_consistency", "read_policy", "transaction")

def __init__(self, config=None, **kwargs):
read_policy = kwargs.pop("read_policy", None)
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/ndb/_remote.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from google.cloud.ndb import exceptions


class RemoteCall:
class RemoteCall(object):
"""Represents a remote call.

This is primarily a wrapper for futures returned by gRPC. This holds some
Expand Down
13 changes: 11 additions & 2 deletions google/cloud/ndb/_retry.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,15 @@
_DEFAULT_RETRIES = 3


def wraps_safely(obj, attr_names=functools.WRAPPER_ASSIGNMENTS):
    """A :func:`functools.wraps` variant tolerant of missing attributes.

    The Python 2.7 implementation of ``functools.wraps`` raises an
    ``AttributeError`` when the wrapped callable lacks one of the
    standard wrapper attributes (such as ``__module__``). Filtering the
    attribute list down to those actually present on ``obj`` sidesteps
    that bug.

    Args:
        obj: The callable being wrapped.
        attr_names: Candidate attribute names to copy onto the wrapper;
            defaults to ``functools.WRAPPER_ASSIGNMENTS``.

    Returns:
        A decorator, as produced by ``functools.wraps``.
    """
    present = tuple(name for name in attr_names if hasattr(obj, name))
    return functools.wraps(obj, assigned=present)


def retry_async(callback, retries=_DEFAULT_RETRIES):
"""Decorator for retrying functions or tasklets asynchronously.

Expand All @@ -49,7 +58,7 @@ def retry_async(callback, retries=_DEFAULT_RETRIES):
"""

@tasklets.tasklet
@functools.wraps(callback)
@wraps_safely(callback)
def retry_wrapper(*args, **kwargs):
sleep_generator = core_retry.exponential_sleep_generator(
_DEFAULT_INITIAL_DELAY,
Expand All @@ -66,7 +75,7 @@ def retry_wrapper(*args, **kwargs):
# `e` is removed from locals at end of block
error = e # See: https://goo.gl/5J8BMK
if not is_transient_error(error):
raise
raise error
else:
raise tasklets.Return(result)

Expand Down
15 changes: 11 additions & 4 deletions google/cloud/ndb/_transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@

import functools

from google.cloud.ndb import context as context_module
from google.cloud.ndb import _datastore_api
from google.cloud.ndb import exceptions
from google.cloud.ndb import _retry
from google.cloud.ndb import tasklets
Expand All @@ -28,6 +26,9 @@ def in_transaction():
bool: :data:`True` if there is a transaction for the current context,
otherwise :data:`False`.
"""
# Avoid circular import in Python 2.7
from google.cloud.ndb import context as context_module

return context_module.get_context().transaction is not None


Expand Down Expand Up @@ -73,6 +74,9 @@ def transaction_async(

This is the asynchronous version of :func:`transaction`.
"""
# Avoid circular import in Python 2.7
from google.cloud.ndb import context as context_module

if propagation is not None:
raise exceptions.NoLongerImplementedError()

Expand All @@ -94,6 +98,9 @@ def transaction_async(

@tasklets.tasklet
def _transaction_async(context, callback, read_only=False):
# Avoid circular import in Python 2.7
from google.cloud.ndb import _datastore_api

# Start the transaction
transaction_id = yield _datastore_api.begin_transaction(
read_only, retries=0
Expand All @@ -114,9 +121,9 @@ def _transaction_async(context, callback, read_only=False):
yield _datastore_api.commit(transaction_id, retries=0)

# Rollback if there is an error
except: # noqa: E722
except Exception as e: # noqa: E722
yield _datastore_api.rollback(transaction_id)
raise
raise e

tx_context._clear_global_cache()
for callback in on_commit_callbacks:
Expand Down
Loading