Backwards compatibility for structured props.
Chris Rossi committed Sep 3, 2019
1 parent 34b2517 commit 0bdcbca
Showing 6 changed files with 401 additions and 23 deletions.
4 changes: 4 additions & 0 deletions google/cloud/ndb/client.py
@@ -114,6 +114,7 @@ def context(
global_cache=None,
global_cache_policy=None,
global_cache_timeout_policy=None,
legacy_data=True,
):
"""Establish a context for a set of NDB calls.
@@ -157,13 +158,16 @@ def context(
global_cache_timeout_policy (Optional[Callable[[key.Key], int]]):
The global cache timeout to use in this context. See:
:meth:`~google.cloud.ndb.context.Context.set_global_cache_timeout_policy`.
legacy_data (bool): Set to ``True`` (the default) to write data in
a way that can be read by the legacy version of NDB.
"""
context = context_module.Context(
self,
cache_policy=cache_policy,
global_cache=global_cache,
global_cache_policy=global_cache_policy,
global_cache_timeout_policy=global_cache_timeout_policy,
legacy_data=legacy_data,
)
with context.use():
yield context
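The new ``legacy_data`` flag is chosen when the context is established. A minimal usage sketch, assuming a configured Google Cloud project with default credentials (the actual reads and writes are elided):

from google.cloud import ndb

client = ndb.Client()

# Default: StructuredProperty values are flattened into dotted names so the
# legacy (App Engine) version of NDB can still read the stored entities.
with client.context(legacy_data=True):
    pass  # ...puts and gets here...

# Opt out to store StructuredProperty values as nested entities instead.
with client.context(legacy_data=False):
    pass  # ...puts and gets here...
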
3 changes: 3 additions & 0 deletions google/cloud/ndb/context.py
@@ -144,6 +144,7 @@ def policy(key):
"cache",
"global_cache",
"on_commit_callbacks",
"legacy_data",
],
)

@@ -180,6 +181,7 @@ def __new__(
global_cache_timeout_policy=None,
datastore_policy=None,
on_commit_callbacks=None,
legacy_data=True,
):
if eventloop is None:
eventloop = _eventloop.EventLoop()
@@ -210,6 +212,7 @@ def __new__(
cache=new_cache,
global_cache=global_cache,
on_commit_callbacks=on_commit_callbacks,
legacy_data=legacy_data,
)

context.set_cache_policy(cache_policy)
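The flag is carried on the ``Context`` object, so serialization code can look it up from the current context. A small sketch, assuming an active context established as in the ``client.context`` example above:

from google.cloud.ndb import context as context_module

def uses_legacy_layout():
    # True when entities written in this context use the legacy,
    # dotted-name layout for structured properties.
    return context_module.get_context().legacy_data
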
122 changes: 100 additions & 22 deletions google/cloud/ndb/model.py
@@ -646,6 +646,25 @@ def _entity_from_protobuf(protobuf):
return _entity_from_ds_entity(ds_entity)


def _properties_of(entity):
seen = set()

for cls in type(entity).mro():
if not hasattr(cls, "_properties"):
continue

for prop in cls._properties.values():
if (
not isinstance(prop, Property)
or isinstance(prop, ModelKey)
or prop._name in seen
):
continue

seen.add(prop._name)
yield prop


def _entity_to_ds_entity(entity, set_key=True):
"""Convert an NDB entity to Datastore entity.
@@ -662,33 +681,20 @@ def _entity_to_ds_entity(entity, set_key=True):
uninitialized = []
exclude_from_indexes = []

for cls in type(entity).mro():
if not hasattr(cls, "_properties"):
continue

for prop in cls._properties.values():
if (
not isinstance(prop, Property)
or isinstance(prop, ModelKey)
or prop._name in data
):
continue

if not prop._is_initialized(entity):
uninitialized.append(prop._name)
for prop in _properties_of(entity):
if not prop._is_initialized(entity):
uninitialized.append(prop._name)

value = prop._get_base_value_unwrapped_as_list(entity)
if not prop._repeated:
value = value[0]
data[prop._name] = value
names = prop._to_datastore(entity, data)

if not prop._indexed:
exclude_from_indexes.append(prop._name)
if not prop._indexed:
for name in names:
exclude_from_indexes.append(name)

if uninitialized:
names = ", ".join(uninitialized)
missing = ", ".join(uninitialized)
raise exceptions.BadValueError(
"Entity has uninitialized properties: {}".format(names)
"Entity has uninitialized properties: {}".format(missing)
)

ds_entity = None
@@ -1984,6 +1990,38 @@ def _get_for_dict(self, entity):
"""
return self._get_value(entity)

def _to_datastore(self, entity, data, prefix="", repeated=False):
"""Helper to convert property to Datastore serializable data.
Called to help assemble a Datastore entity prior to serialization for
storage. Subclasses (like StructuredProperty) may need to override the
default behavior.
Args:
entity (entity.Entity): The NDB entity to convert.
data (dict): The data that will eventually be used to construct the
Datastore entity. This method works by updating ``data``.
prefix (str): Optional name prefix used for StructuredProperty (if
present, must end in ".").
repeated (bool): ``True`` if values should be repeated because an
ancestor node is a repeated property.
Returns:
Sequence[str]: Any keys that were set on ``data`` by this method
call.
"""
value = self._get_base_value_unwrapped_as_list(entity)
if not self._repeated:
value = value[0]

key = prefix + self._name
if repeated:
data.setdefault(key, []).append(value)
else:
data[key] = value

return (key,)


def _validate_key(value, entity=None):
"""Validate a key.
@@ -3861,6 +3899,46 @@ def _get_value_size(self, entity):
values = [values]
return len(values)

def _to_datastore(self, entity, data, prefix="", repeated=False):
"""Override of :method:`StructuredProperty._to_datastore`.
If ``legacy_data`` is ``True``, then we need to override the default
behavior to store everything in a single Datastore entity that uses
dotted attribute names, rather than nesting entities.
"""
context = context_module.get_context()

# The easy way
if not context.legacy_data:
return super(StructuredProperty, self)._to_datastore(
entity, data, prefix=prefix, repeated=repeated
)

# The hard way
next_prefix = prefix + self._name + "."
next_repeated = repeated or self._repeated
keys = []

values = self._get_user_value(entity)
if not self._repeated:
values = (values,)

for value in values:
if value is None:
keys.extend(
super(StructuredProperty, self)._to_datastore(
entity, data, prefix=prefix, repeated=repeated
)
)
continue

for prop in _properties_of(value):
keys.extend(prop._to_datastore(
value, data, prefix=next_prefix, repeated=next_repeated
))

return keys


class LocalStructuredProperty(BlobProperty):
"""A property that contains ndb.Model value.
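The ``Property._to_datastore`` hook added above follows a small contract: write the property's value into ``data`` under ``prefix + name``, append to a list instead when an ancestor property is repeated, and return the keys that were touched so the caller can mark them as excluded from indexes. A standalone sketch of that contract with illustrative names only (not the ndb implementation):

def to_datastore_sketch(name, value, data, prefix="", repeated=False):
    # Write ``value`` under a possibly dotted key and report the key touched.
    key = prefix + name
    if repeated:
        # An ancestor property is repeated: collect values in a parallel list.
        data.setdefault(key, []).append(value)
    else:
        data[key] = value
    return (key,)

data = {}
to_datastore_sketch("one", "hi", data, prefix="bar.", repeated=True)
to_datastore_sketch("one", "bye", data, prefix="bar.", repeated=True)
# data == {"bar.one": ["hi", "bye"]}
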
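With ``legacy_data`` enabled, ``StructuredProperty._to_datastore`` takes "the hard way": the nested value is flattened into the parent entity under dotted attribute names, and repeated structured values become parallel lists, which is the layout the system test below asserts (``bar.one``, ``bar.two``). A rough, self-contained sketch of that flattening, using plain dicts in place of ndb models (illustrative only):

def flatten_legacy(name, value, data):
    # ``value`` stands in for a structured sub-entity (a dict), or a list of
    # such dicts when the structured property is repeated.
    repeated = isinstance(value, list)
    values = value if repeated else [value]
    keys = []
    for item in values:
        for sub_name, sub_value in item.items():
            key = "{}.{}".format(name, sub_name)
            if repeated:
                # Repeated structured values become parallel lists.
                data.setdefault(key, []).append(sub_value)
            else:
                data[key] = sub_value
            keys.append(key)
    return keys

data = {"foo": 42}
flatten_legacy("bar", {"one": "hi", "two": "mom"}, data)
# data == {"foo": 42, "bar.one": "hi", "bar.two": "mom"}
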
5 changes: 4 additions & 1 deletion tests/system/conftest.py
@@ -107,5 +107,8 @@ def namespace():
@pytest.fixture
def client_context(namespace):
client = ndb.Client(namespace=namespace)
with client.context(cache_policy=False) as the_context:
with client.context(
cache_policy=False,
legacy_data=False,
) as the_context:
yield the_context
30 changes: 30 additions & 0 deletions tests/system/test_crud.py
@@ -728,6 +728,36 @@ class SomeKind(ndb.Model):
dispose_of(key._key)


def test_insert_entity_with_structured_property_legacy_data(
client_context, dispose_of, ds_client
):
class OtherKind(ndb.Model):
one = ndb.StringProperty()
two = ndb.StringProperty()

class SomeKind(ndb.Model):
foo = ndb.IntegerProperty()
bar = ndb.StructuredProperty(OtherKind)

with client_context.new(legacy_data=True).use():
entity = SomeKind(foo=42, bar=OtherKind(one="hi", two="mom"))
key = entity.put()

retrieved = key.get()
assert retrieved.foo == 42
assert retrieved.bar.one == "hi"
assert retrieved.bar.two == "mom"

assert isinstance(retrieved.bar, OtherKind)

ds_entity = ds_client.get(key._key)
assert ds_entity["foo"] == 42
assert ds_entity["bar.one"] == "hi"
assert ds_entity["bar.two"] == "mom"

dispose_of(key._key)


@pytest.mark.usefixtures("client_context")
def test_retrieve_entity_with_legacy_structured_property(ds_entity):
class OtherKind(ndb.Model):