diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py
index 7970c78c5136..ea20e12b0b54 100644
--- a/gcloud/storage/_helpers.py
+++ b/gcloud/storage/_helpers.py
@@ -25,19 +25,10 @@ class _PropertyMixin(object):
     """Abstract mixin for cloud storage classes with associated propertties.
 
     Non-abstract subclasses should implement:
-      - CUSTOM_PROPERTY_ACCESSORS
       - connection
       - path
     """
 
-    CUSTOM_PROPERTY_ACCESSORS = None
-    """Mapping of field name -> accessor for fields w/ custom accessors.
-
-    Expected to be set by subclasses. Fields in this mapping will cause
-    :meth:`_get_property()` to raise a KeyError with a message to use the
-    relevant accessor methods.
-    """
-
     @property
     def connection(self):
         """Abstract getter for the connection to use."""
@@ -64,12 +55,11 @@ def __init__(self, name=None, properties=None):
 
     @property
     def properties(self):
-        """Ensure properties are loaded, and return a copy.
+        """Return a copy of properties.
 
         :rtype: dict
+        :returns: Copy of properties.
         """
-        if not self._properties:
-            self._reload_properties()
         return self._properties.copy()
 
     @property
@@ -131,30 +121,6 @@ def _patch_properties(self, properties):
             query_params={'projection': 'full'})
         return self
 
-    def _get_property(self, field, default=None):
-        """Return the value of a field from the server-side representation.
-
-        If you request a field that isn't available, and that field can
-        be retrieved by refreshing data from Cloud Storage, this method
-        will reload the data using :func:`_PropertyMixin._reload_properties`.
-
-        :type field: string
-        :param field: A particular field to retrieve from properties.
-
-        :type default: anything
-        :param default: The value to return if the field provided wasn't found.
-
-        :rtype: anything
-        :returns: value of the specific field, or the default if not found.
-        """
-        # Raise for fields which have custom accessors.
-        custom = self.CUSTOM_PROPERTY_ACCESSORS.get(field)
-        if custom is not None:
-            message = "Use '%s' or related methods instead." % custom
-            raise KeyError((field, message))
-
-        return self.properties.get(field, default)
-
 
 class _PropertyBatch(object):
     """Context manager: Batch updates to object's ``_patch_properties``
diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py
index dd0bb72d3909..31964b76bc8f 100644
--- a/gcloud/storage/blob.py
+++ b/gcloud/storage/blob.py
@@ -52,29 +52,6 @@ class Blob(_PropertyMixin):
     :param properties: All the other data provided by Cloud Storage.
     """
 
-    CUSTOM_PROPERTY_ACCESSORS = {
-        'acl': 'acl',
-        'cacheControl': 'cache_control',
-        'contentDisposition': 'content_disposition',
-        'contentEncoding': 'content_encoding',
-        'contentLanguage': 'content_language',
-        'contentType': 'content_type',
-        'componentCount': 'component_count',
-        'etag': 'etag',
-        'generation': 'generation',
-        'id': 'id',
-        'mediaLink': 'media_link',
-        'metageneration': 'metageneration',
-        'name': 'name',
-        'owner': 'owner',
-        'selfLink': 'self_link',
-        'size': 'size',
-        'storageClass': 'storage_class',
-        'timeDeleted': 'time_deleted',
-        'updated': 'updated',
-    }
-    """Map field name -> accessor for fields w/ custom accessors."""
-
     CHUNK_SIZE = 1024 * 1024  # 1 MB.
     """The size of a chunk of data whenever iterating (1 MB).
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index cba3b43ced62..344c83971d06 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -86,26 +86,6 @@ class Bucket(_PropertyMixin):
     _MAX_OBJECTS_FOR_BUCKET_DELETE = 256
     """Maximum number of existing objects allowed in Bucket.delete()."""
 
-    CUSTOM_PROPERTY_ACCESSORS = {
-        'acl': 'acl',
-        'cors': 'get_cors()',
-        'defaultObjectAcl': 'get_default_object_acl()',
-        'etag': 'etag',
-        'id': 'id',
-        'lifecycle': 'get_lifecycle()',
-        'location': 'location',
-        'logging': 'get_logging()',
-        'metageneration': 'metageneration',
-        'name': 'name',
-        'owner': 'owner',
-        'projectNumber': 'project_number',
-        'selfLink': 'self_link',
-        'storageClass': 'storage_class',
-        'timeCreated': 'time_created',
-        'versioning': 'versioning_enabled',
-    }
-    """Map field name -> accessor for fields w/ custom accessors."""
-
     # ACL rules are lazily retrieved.
     _acl = _default_object_acl = None
 
@@ -594,6 +574,7 @@ def get_logging(self):
         :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix``
                   (if logging is enabled), or None (if not).
         """
+        self._reload_properties()
         info = self.properties.get('logging')
         if info is not None:
             return info.copy()
diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py
index d98634ea5c13..74db00721e9e 100644
--- a/gcloud/storage/test__helpers.py
+++ b/gcloud/storage/test__helpers.py
@@ -24,10 +24,9 @@ def _getTargetClass(self):
     def _makeOne(self, *args, **kw):
         return self._getTargetClass()(*args, **kw)
 
-    def _derivedClass(self, connection=None, path=None, **custom_fields):
+    def _derivedClass(self, connection=None, path=None):
 
         class Derived(self._getTargetClass()):
-            CUSTOM_PROPERTY_ACCESSORS = custom_fields
 
             @property
             def connection(self):
@@ -39,7 +38,7 @@ def path(self):
 
         return Derived
 
-    def test_connetction_is_abstract(self):
+    def test_connection_is_abstract(self):
         mixin = self._makeOne()
         self.assertRaises(NotImplementedError, lambda: mixin.connection)
 
@@ -47,10 +46,6 @@ def test_path_is_abstract(self):
         mixin = self._makeOne()
         self.assertRaises(NotImplementedError, lambda: mixin.path)
 
-    def test_properties_eager(self):
-        derived = self._derivedClass()(properties={'extant': False})
-        self.assertEqual(derived.properties, {'extant': False})
-
     def test_batch(self):
         connection = _Connection({'foo': 'Qux', 'bar': 'Baz'})
         derived = self._derivedClass(connection, '/path')()
@@ -65,9 +60,11 @@ def test_batch(self):
         self.assertEqual(kw[0]['data'], {'foo': 'Qux', 'bar': 'Baz'})
         self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
 
-    def test_properties_lazy(self):
+    def test_properties_no_fetch(self):
         connection = _Connection({'foo': 'Foo'})
         derived = self._derivedClass(connection, '/path')()
+        self.assertEqual(derived.properties, {})
+        derived._reload_properties()
         self.assertEqual(derived.properties, {'foo': 'Foo'})
         kw = connection._requested
         self.assertEqual(len(kw), 1)
@@ -86,40 +83,6 @@ def test__reload_properties(self):
         self.assertEqual(kw[0]['path'], '/path')
         self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
 
-    def test__get_property_eager_hit(self):
-        derived = self._derivedClass()(properties={'foo': 'Foo'})
-        self.assertEqual(derived._get_property('foo'), 'Foo')
-
-    def test__get_property_eager_miss_w_default(self):
-        connection = _Connection({'foo': 'Foo'})
-        derived = self._derivedClass(connection, '/path')()
-        default = object()
-        self.assertTrue(derived._get_property('nonesuch', default) is default)
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]['method'], 'GET')
-        self.assertEqual(kw[0]['path'], '/path')
-        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
-
-    def test__get_property_lazy_hit(self):
-        connection = _Connection({'foo': 'Foo'})
-        derived = self._derivedClass(connection, '/path')()
-        self.assertTrue(derived._get_property('nonesuch') is None)
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]['method'], 'GET')
-        self.assertEqual(kw[0]['path'], '/path')
-        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
-
-    def test__get_property_w_custom_field(self):
-        derived = self._derivedClass(foo='get_foo')()
-        try:
-            derived._get_property('foo')
-        except KeyError as e:
-            self.assertTrue('get_foo' in str(e))
-        else:  # pragma: NO COVER
-            self.assert_('KeyError not raised')
-
     def test__patch_properties(self):
         connection = _Connection({'foo': 'Foo'})
         derived = self._derivedClass(connection, '/path')()
@@ -141,11 +104,10 @@ def _getTargetClass(self):
     def _makeOne(self, wrapped):
         return self._getTargetClass()(wrapped)
 
-    def _makeWrapped(self, connection=None, path=None, **custom_fields):
+    def _makeWrapped(self, connection=None, path=None):
         from gcloud.storage._helpers import _PropertyMixin
 
         class Wrapped(_PropertyMixin):
-            CUSTOM_PROPERTY_ACCESSORS = custom_fields
 
             @property
             def connection(self):
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index 4bef81ad5f4d..766d2e949781 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -633,6 +633,7 @@ def test_get_cors_lazy(self):
         after = {'cors': [CORS_ENTRY]}
         connection = _Connection(after)
         bucket = self._makeOne(connection, NAME)
+        bucket._reload_properties()
         entries = bucket.get_cors()
         self.assertEqual(len(entries), 1)
         self.assertEqual(entries[0]['maxAgeSeconds'],
@@ -725,6 +726,7 @@ def test_get_lifecycle_lazy(self):
         after = {'lifecycle': {'rule': [LC_RULE]}}
         connection = _Connection(after)
         bucket = self._makeOne(connection, NAME)
+        bucket._reload_properties()
         entries = bucket.get_lifecycle()
         self.assertEqual(len(entries), 1)
         self.assertEqual(entries[0]['action']['type'], 'Delete')
@@ -776,30 +778,22 @@ def test_location_setter(self):
         self.assertEqual(kw[0]['data'], {'location': 'AS'})
         self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
 
-    def test_get_logging_eager_w_prefix(self):
+    def test_get_logging_w_prefix(self):
         NAME = 'name'
         LOG_BUCKET = 'logs'
         LOG_PREFIX = 'pfx'
         before = {
-            'logging': {'logBucket': LOG_BUCKET,
-                        'logObjectPrefix': LOG_PREFIX}}
-        connection = _Connection()
-        bucket = self._makeOne(connection, NAME, before)
-        info = bucket.get_logging()
-        self.assertEqual(info['logBucket'], LOG_BUCKET)
-        self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
-        kw = connection._requested
-        self.assertEqual(len(kw), 0)
-
-    def test_get_logging_lazy_wo_prefix(self):
-        NAME = 'name'
-        LOG_BUCKET = 'logs'
-        after = {'logging': {'logBucket': LOG_BUCKET}}
-        connection = _Connection(after)
+            'logging': {
+                'logBucket': LOG_BUCKET,
+                'logObjectPrefix': LOG_PREFIX,
+            },
+        }
+        resp_to_reload = before
+        connection = _Connection(resp_to_reload)
         bucket = self._makeOne(connection, NAME)
         info = bucket.get_logging()
         self.assertEqual(info['logBucket'], LOG_BUCKET)
-        self.assertEqual(info.get('logObjectPrefix'), None)
+        self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'GET')
@@ -810,8 +804,12 @@ def test_enable_logging_defaults(self):
         NAME = 'name'
         LOG_BUCKET = 'logs'
         before = {'logging': None}
-        after = {'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': ''}}
-        connection = _Connection(after)
+        resp_to_reload = before
+        resp_to_enable_logging = {
+            'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': ''},
+        }
+        connection = _Connection(resp_to_reload, resp_to_enable_logging,
+                                 resp_to_enable_logging)
         bucket = self._makeOne(connection, NAME, before)
         self.assertTrue(bucket.get_logging() is None)
         bucket.enable_logging(LOG_BUCKET)
@@ -819,20 +817,30 @@ def test_enable_logging_defaults(self):
         self.assertEqual(info['logBucket'], LOG_BUCKET)
         self.assertEqual(info['logObjectPrefix'], '')
         kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]['method'], 'PATCH')
+        self.assertEqual(len(kw), 3)
+        self.assertEqual(kw[0]['method'], 'GET')
         self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
-        self.assertEqual(kw[0]['data'], after)
-        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
+        self.assertEqual(kw[1]['method'], 'PATCH')
+        self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[1]['data'], resp_to_enable_logging)
+        self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[2]['method'], 'GET')
+        self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})
 
     def test_enable_logging_explicit(self):
         NAME = 'name'
         LOG_BUCKET = 'logs'
         LOG_PFX = 'pfx'
         before = {'logging': None}
-        after = {
-            'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': LOG_PFX}}
-        connection = _Connection(after)
+        resp_to_reload = before
+        resp_to_enable_logging = {
+            'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': LOG_PFX},
+        }
+        connection = _Connection(resp_to_reload,
+                                 resp_to_enable_logging,
+                                 resp_to_enable_logging)
         bucket = self._makeOne(connection, NAME, before)
         self.assertTrue(bucket.get_logging() is None)
         bucket.enable_logging(LOG_BUCKET, LOG_PFX)
@@ -840,27 +848,41 @@ def test_enable_logging_explicit(self):
         self.assertEqual(info['logBucket'], LOG_BUCKET)
         self.assertEqual(info['logObjectPrefix'], LOG_PFX)
         kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]['method'], 'PATCH')
+        self.assertEqual(len(kw), 3)
+        self.assertEqual(kw[0]['method'], 'GET')
         self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
-        self.assertEqual(kw[0]['data'], after)
-        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
+        self.assertEqual(kw[1]['method'], 'PATCH')
+        self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[1]['data'], resp_to_enable_logging)
+        self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[2]['method'], 'GET')
+        self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})
 
     def test_disable_logging(self):
         NAME = 'name'
         before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}}
-        after = {'logging': None}
-        connection = _Connection(after)
+        resp_to_reload = before
+        resp_to_disable_logging = {'logging': None}
+        connection = _Connection(resp_to_reload, resp_to_disable_logging,
+                                 resp_to_disable_logging)
         bucket = self._makeOne(connection, NAME, before)
         self.assertTrue(bucket.get_logging() is not None)
         bucket.disable_logging()
        self.assertTrue(bucket.get_logging() is None)
         kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]['method'], 'PATCH')
+        self.assertEqual(len(kw), 3)
+        self.assertEqual(kw[0]['method'], 'GET')
         self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
-        self.assertEqual(kw[0]['data'], {'logging': None})
-        self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'})
+        self.assertEqual(kw[1]['method'], 'PATCH')
+        self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[1]['data'], {'logging': None})
+        self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
+        self.assertEqual(kw[2]['method'], 'GET')
+        self.assertEqual(kw[2]['path'], '/b/%s' % NAME)
+        self.assertEqual(kw[2]['query_params'], {'projection': 'noAcl'})
 
     def test_metageneration(self):
         METAGENERATION = 42
@@ -904,6 +926,7 @@ def test_versioning_enabled_getter_missing(self):
         NAME = 'name'
         connection = _Connection({})
         bucket = self._makeOne(connection, NAME)
+        bucket._reload_properties()
         self.assertEqual(bucket.versioning_enabled, False)
         kw = connection._requested
         self.assertEqual(len(kw), 1)
diff --git a/regression/storage.py b/regression/storage.py
index 86899c04769b..99c04a1583f5 100644
--- a/regression/storage.py
+++ b/regression/storage.py
@@ -123,7 +123,7 @@ def test_large_file_write_from_stream(self):
         self.bucket.upload_file_object(file_obj, blob=blob)
         self.case_blobs_to_delete.append(blob)
 
-        blob._properties.clear()  # force a reload
+        blob._reload_properties()  # force a reload
         self.assertEqual(blob.md5_hash, file_data['hash'])
 
     def test_small_file_write_from_filename(self):
@@ -134,7 +134,7 @@ def test_small_file_write_from_filename(self):
         blob.upload_from_filename(file_data['path'])
         self.case_blobs_to_delete.append(blob)
 
-        blob._properties.clear()  # force a reload
+        blob._reload_properties()  # force a reload
         self.assertEqual(blob.md5_hash, file_data['hash'])
 
     def test_write_metadata(self):
@@ -144,7 +144,7 @@ def test_write_metadata(self):
         # NOTE: This should not be necessary. We should be able to pass
         # it in to upload_file and also to upload_from_string.
         blob.content_type = 'image/png'
-        blob._properties.clear()  # force a reload
+        blob._reload_properties()  # force a reload
         self.assertEqual(blob.content_type, 'image/png')
 
     def test_direct_write_and_read_into_file(self):
@@ -154,6 +154,7 @@ def test_direct_write_and_read_into_file(self):
         self.case_blobs_to_delete.append(blob)
 
         same_blob = storage.Blob(bucket=self.bucket, name='MyBuffer')
+        same_blob._reload_properties()  # force a reload
         temp_filename = tempfile.mktemp()
         with open(temp_filename, 'w') as file_obj:
             same_blob.download_to_file(file_obj)
@@ -171,7 +172,9 @@ def test_copy_existing_file(self):
         new_blob = self.bucket.copy_blob(blob, self.bucket,
                                          'CloudLogoCopy')
         self.case_blobs_to_delete.append(new_blob)
 
+        blob._reload_properties()  # force a reload
         base_contents = blob.download_as_string()
+        new_blob._reload_properties()  # force a reload
         copied_contents = new_blob.download_as_string()
         self.assertEqual(base_contents, copied_contents)
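
Note: with lazy loading removed, reading ``properties`` no longer issues an
implicit GET; callers must refresh server-side metadata explicitly, as the
updated regression tests above now do. A minimal sketch of the new calling
pattern, assuming an already-constructed ``bucket`` (``_reload_properties`` is
a private helper; it is shown here only to mirror the tests):

    from gcloud import storage

    blob = storage.Blob(bucket=bucket, name='my-object')
    print(blob.properties)  # {} -- no request is made on attribute access

    # Explicit refresh: GET /b/<bucket>/o/my-object?projection=noAcl
    blob._reload_properties()
    print(blob.properties.get('md5Hash'))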