diff --git a/gcloud/_helpers.py b/gcloud/_helpers.py index 04598770cc92..c376c899f88b 100644 --- a/gcloud/_helpers.py +++ b/gcloud/_helpers.py @@ -228,15 +228,6 @@ def set_default_project(project=None): raise EnvironmentError('No project could be inferred.') -def get_default_project(): - """Get default project. - - :rtype: string or ``NoneType`` - :returns: The default project if one has been set. - """ - return _DEFAULTS.project - - class _DefaultsContainer(object): """Container for defaults. diff --git a/gcloud/_testing.py b/gcloud/_testing.py index 18d21b19b358..340ec019c9a2 100644 --- a/gcloud/_testing.py +++ b/gcloud/_testing.py @@ -35,11 +35,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): setattr(self.module, key, value) -def _monkey_defaults(*args, **kwargs): - mock_defaults = _DefaultsContainer(*args, **kwargs) - return _Monkey(_helpers, _DEFAULTS=mock_defaults) - - def _setup_defaults(test_case, *args, **kwargs): test_case._replaced_defaults = _helpers._DEFAULTS _helpers._DEFAULTS = _DefaultsContainer(*args, **kwargs) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index 8f074ee79bd6..d972079f06d5 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -20,9 +20,6 @@ from Crypto.Hash import MD5 import base64 -from gcloud.storage._implicit_environ import get_default_connection -from gcloud.storage.batch import Batch - class _PropertyMixin(object): """Abstract mixin for cloud storage classes with associated propertties. @@ -35,33 +32,34 @@ class _PropertyMixin(object): :param name: The name of the object. """ - @property - def path(self): - """Abstract getter for the object path.""" - raise NotImplementedError - def __init__(self, name=None): self.name = name self._properties = {} self._changes = set() - @staticmethod - def _client_or_connection(client): - """Temporary method to get a connection from a client. + @property + def path(self): + """Abstract getter for the object path.""" + raise NotImplementedError - If the client is null, gets the connection from the environment. + @property + def client(self): + """Abstract getter for the object client.""" + raise NotImplementedError + + def _require_client(self, client): + """Check client or verify over-ride. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to default connection. + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current object. - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: The connection determined from the ``client`` or environment. + :rtype: :class:`gcloud.storage.client.Client` + :returns: The client passed in or the currently bound client. """ if client is None: - return _require_connection() - else: - return client.connection + client = self.client + return client def reload(self, client=None): """Reload properties from Cloud Storage. @@ -70,11 +68,11 @@ def reload(self, client=None): :param client: Optional. The client to use. If not passed, falls back to default connection. """ - connection = self._client_or_connection(client) + client = self._require_client(client) # Pass only '?projection=noAcl' here because 'acl' and related # are handled via custom endpoints. 
query_params = {'projection': 'noAcl'} - api_response = connection.api_request( + api_response = client.connection.api_request( method='GET', path=self.path, query_params=query_params, _target_object=self) self._set_properties(api_response) @@ -116,41 +114,17 @@ def patch(self, client=None): :param client: Optional. The client to use. If not passed, falls back to default connection. """ - connection = self._client_or_connection(client) + client = self._require_client(client) # Pass '?projection=full' here because 'PATCH' documented not # to work properly w/ 'noAcl'. update_properties = dict((key, self._properties[key]) for key in self._changes) - api_response = connection.api_request( + api_response = client.connection.api_request( method='PATCH', path=self.path, data=update_properties, query_params={'projection': 'full'}, _target_object=self) self._set_properties(api_response) -def _require_connection(connection=None): - """Infer a connection from the environment, if not passed explicitly. - - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: Optional. - - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: A connection based on the current environment. - :raises: :class:`EnvironmentError` if ``connection`` is ``None``, and - cannot be inferred from the environment. - """ - # NOTE: We use current Batch directly since it inherits from Connection. - if connection is None: - connection = Batch.current() - - if connection is None: - connection = get_default_connection() - - if connection is None: - raise EnvironmentError('Connection could not be inferred.') - - return connection - - def _scalar_property(fieldname): """Create a property descriptor around the :class:`_PropertyMixin` helpers. """ diff --git a/gcloud/storage/_implicit_environ.py b/gcloud/storage/_implicit_environ.py deleted file mode 100644 index 3b75232a28d8..000000000000 --- a/gcloud/storage/_implicit_environ.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2015 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Module to provide implicit behavior based on enviroment. - -Allows the storage package to infer the default bucket and connection -from the enviroment. -""" - - -from gcloud._helpers import _lazy_property_deco -from gcloud.storage.connection import Connection - - -class _DefaultsContainer(object): - """Container for defaults. - - :type bucket: :class:`gcloud.storage.bucket.Bucket` - :param bucket: Persistent implied default bucket from environment. - - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: Persistent implied connection from environment. - - :type implicit: boolean - :param implicit: Boolean indicating if the container should allow - implicit properties. 
- """ - - @_lazy_property_deco - @staticmethod - def connection(): - """Return the implicit default connection..""" - return get_connection() - - def __init__(self, bucket=None, connection=None, implicit=False): - self.bucket = bucket - if connection is not None or not implicit: - self.connection = connection - - -def get_default_bucket(): - """Get default bucket. - - :rtype: :class:`gcloud.storage.bucket.Bucket` or ``NoneType`` - :returns: The default bucket if one has been set. - """ - return _DEFAULTS.bucket - - -def get_default_connection(): - """Get default connection. - - :rtype: :class:`gcloud.storage.connection.Connection` or ``NoneType`` - :returns: The default connection if one has been set. - """ - return _DEFAULTS.connection - - -def get_connection(): - """Shortcut method to establish a connection to Cloud Storage. - - Use this if you are going to access several buckets with the same - set of credentials: - - >>> from gcloud import storage - >>> connection = storage.get_connection() - >>> bucket1 = storage.get_bucket('bucket1', connection=connection) - >>> bucket2 = storage.get_bucket('bucket2', connection=connection) - - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: A connection defined with the proper credentials. - """ - return Connection.from_environment() - - -def set_default_connection(connection=None): - """Set default connection either explicitly or implicitly as fall-back. - - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: A connection provided to be the default. - """ - connection = connection or get_connection() - _DEFAULTS.connection = connection - - -_DEFAULTS = _DefaultsContainer(implicit=True) diff --git a/gcloud/storage/_testing.py b/gcloud/storage/_testing.py deleted file mode 100644 index 4eba1a284c54..000000000000 --- a/gcloud/storage/_testing.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Shared storage testing utilities.""" - -from gcloud._testing import _Monkey -from gcloud.storage import _implicit_environ -from gcloud.storage._implicit_environ import _DefaultsContainer - - -def _monkey_defaults(*args, **kwargs): - mock_defaults = _DefaultsContainer(*args, **kwargs) - return _Monkey(_implicit_environ, _DEFAULTS=mock_defaults) - - -def _setup_defaults(test_case, *args, **kwargs): - test_case._replaced_defaults = _implicit_environ._DEFAULTS - _implicit_environ._DEFAULTS = _DefaultsContainer(*args, **kwargs) - - -def _tear_down_defaults(test_case): - _implicit_environ._DEFAULTS = test_case._replaced_defaults diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 688e6194c635..46c228554b77 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -78,8 +78,6 @@ when sending metadata for ACLs to the API. """ -from gcloud.storage._helpers import _require_connection - class _ACLEntity(object): """Class representing a set of roles for an entity. 
@@ -351,23 +349,24 @@ def get_entities(self): self._ensure_loaded() return list(self.entities.values()) - @staticmethod - def _client_or_connection(client): - """Temporary method to get a connection from a client. + @property + def client(self): + """Abstract getter for the object client.""" + raise NotImplementedError - If the client is null, gets the connection from the environment. + def _require_client(self, client): + """Check client or verify over-ride. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to default connection. + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current object. - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: The connection determined from the ``client`` or environment. + :rtype: :class:`gcloud.storage.client.Client` + :returns: The client passed in or the currently bound client. """ if client is None: - return _require_connection() - else: - return client.connection + client = self.client + return client def reload(self, client=None): """Reload the ACL data from Cloud Storage. @@ -377,11 +376,11 @@ def reload(self, client=None): to default connection. """ path = self.reload_path - connection = self._client_or_connection(client) + client = self._require_client(client) self.entities.clear() - found = connection.api_request(method='GET', path=path) + found = client.connection.api_request(method='GET', path=path) self.loaded = True for entry in found.get('items', ()): self.add_entity(self.entity_from_dict(entry)) @@ -405,8 +404,8 @@ def save(self, acl=None, client=None): if save_to_backend: path = self.save_path - connection = self._client_or_connection(client) - result = connection.api_request( + client = self._require_client(client) + result = client.connection.api_request( method='PATCH', path=path, data={self._URL_PATH_ELEM: list(acl)}, @@ -442,6 +441,11 @@ def __init__(self, bucket): super(BucketACL, self).__init__() self.bucket = bucket + @property + def client(self): + """The client bound to this ACL's bucket.""" + return self.bucket.client + @property def reload_path(self): """Compute the path for GET API requests for this ACL.""" @@ -470,6 +474,11 @@ def __init__(self, blob): super(ObjectACL, self).__init__() self.blob = blob + @property + def client(self): + """The client bound to this ACL's blob.""" + return self.blob.client + @property def reload_path(self): """Compute the path for GET API requests for this ACL.""" diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index bc817ffcb6ef..0f4a239464fc 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -32,9 +32,7 @@ from gcloud.credentials import generate_signed_url from gcloud.exceptions import NotFound from gcloud.storage._helpers import _PropertyMixin -from gcloud.storage._helpers import _require_connection from gcloud.storage._helpers import _scalar_property -from gcloud.storage import _implicit_environ from gcloud.storage.acl import ObjectACL from gcloud._helpers import _RFC3339_MICROS @@ -50,8 +48,7 @@ class Blob(_PropertyMixin): unique path of the object in the bucket. :type bucket: :class:`gcloud.storage.bucket.Bucket` - :param bucket: The bucket to which this blob belongs. Required, unless the - implicit default bucket has been set. + :param bucket: The bucket to which this blob belongs. :type chunk_size: integer :param chunk_size: The size of a chunk of data whenever iterating (1 MB). 
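# Illustrative sketch (not part of the patch): how the new ``client`` properties
# chain together. An ObjectACL resolves its client through its blob, and the
# blob through its bucket (see the ``Blob.client`` property added in blob.py
# below), so the single Client bound to a Bucket serves every object and ACL
# hanging off it. The ``Fake*`` classes are hypothetical stand-ins.

class FakeClient(object):
    pass

class FakeBucket(object):
    def __init__(self, client):
        self.client = client

class FakeBlob(object):
    def __init__(self, bucket):
        self.bucket = bucket

    @property
    def client(self):
        return self.bucket.client   # mirrors the Blob.client property in this patch

class FakeObjectACL(object):
    def __init__(self, blob):
        self.blob = blob

    @property
    def client(self):
        return self.blob.client     # mirrors ObjectACL.client in this patch

client = FakeClient()
acl = FakeObjectACL(FakeBlob(FakeBucket(client)))
assert acl.client is client         # one client, resolved through the chain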
@@ -64,13 +61,7 @@ class Blob(_PropertyMixin): _CHUNK_SIZE_MULTIPLE = 256 * 1024 """Number (256 KB, in bytes) that must divide the chunk size.""" - def __init__(self, name, bucket=None, chunk_size=None): - if bucket is None: - bucket = _implicit_environ.get_default_bucket() - - if bucket is None: - raise ValueError('A Blob must have a bucket set.') - + def __init__(self, name, bucket, chunk_size=None): super(Blob, self).__init__(name=name) self.chunk_size = chunk_size # Check that setter accepts value. @@ -141,6 +132,11 @@ def path(self): return self.path_helper(self.bucket.path, self.name) + @property + def client(self): + """The client bound to this blob.""" + return self.bucket.client + @property def public_url(self): """The public URL for this blob's object. @@ -172,8 +168,8 @@ def generate_signed_url(self, expiration, method='GET', :param method: The HTTP verb that will be used when requesting the URL. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to the ``connection`` stored on the blob's bucket. + :param client: Optional. The client to use. If not passed, falls back + to the ``client`` stored on the blob's bucket. :type credentials: :class:`oauth2client.client.OAuth2Credentials` or :class:`NoneType` @@ -182,62 +178,40 @@ def generate_signed_url(self, expiration, method='GET', :rtype: string :returns: A signed URL you can use to access the resource until expiration. - :raises: :class:`ValueError` if no credentials could be determined - from the arguments. """ resource = '/{bucket_name}/{quoted_name}'.format( bucket_name=self.bucket.name, quoted_name=quote(self.name, safe='')) if credentials is None: - if client is not None: - credentials = client.connection.credentials - else: - raise ValueError('Credentials could be determined.') + client = self._require_client(client) + credentials = client.connection.credentials return generate_signed_url( credentials, resource=resource, api_access_endpoint=_API_ACCESS_ENDPOINT, expiration=expiration, method=method) - @staticmethod - def _client_or_connection(client): - """Temporary method to get a connection from a client. - - If the client is null, gets the connection from the environment. - - :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to default connection. - - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: The connection determined from the ``client`` or environment. - """ - if client is None: - return _require_connection() - else: - return client.connection - def exists(self, client=None): """Determines whether or not this blob exists. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :rtype: boolean :returns: True if the blob exists in Cloud Storage. """ - connection = self._client_or_connection(client) + client = self._require_client(client) try: # We only need the status code (200 or not) so we seek to # minimize the returned payload. query_params = {'fields': 'name'} # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. 
- connection.api_request(method='GET', path=self.path, - query_params=query_params, - _target_object=None) + client.connection.api_request(method='GET', path=self.path, + query_params=query_params, + _target_object=None) # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. @@ -261,7 +235,7 @@ def rename(self, new_name, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :rtype: :class:`Blob` :returns: The newly-copied blob. @@ -276,7 +250,7 @@ def delete(self, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :rtype: :class:`Blob` :returns: The blob that was just deleted. @@ -294,11 +268,11 @@ def download_to_file(self, file_obj, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :raises: :class:`gcloud.exceptions.NotFound` """ - connection = self._client_or_connection(client) + client = self._require_client(client) download_url = self.media_link # Use apitools 'Download' facility. @@ -309,7 +283,7 @@ def download_to_file(self, file_obj, client=None): headers['Range'] = 'bytes=0-%d' % (self.chunk_size - 1,) request = http_wrapper.Request(download_url, 'GET', headers) - download.InitializeDownload(request, connection.http) + download.InitializeDownload(request, client.connection.http) # Should we be passing callbacks through from caller? We can't # pass them as None, because apitools wants to print to the console @@ -325,7 +299,7 @@ def download_to_filename(self, filename, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :raises: :class:`gcloud.exceptions.NotFound` """ @@ -340,7 +314,7 @@ def download_as_string(self, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :rtype: bytes :returns: The data stored in this blob. @@ -391,12 +365,13 @@ def upload_from_file(self, file_obj, rewind=False, size=None, :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. :raises: :class:`ValueError` if size is not passed in and can not be determined """ - connection = self._client_or_connection(client) + client = self._require_client(client) + connection = client.connection content_type = (content_type or self._properties.get('contentType') or 'application/octet-stream') @@ -486,7 +461,7 @@ def upload_from_filename(self, filename, content_type=None, :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. 
""" content_type = content_type or self._properties.get('contentType') if content_type is None: @@ -521,7 +496,7 @@ def upload_from_string(self, data, content_type='text/plain', :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the blob's bucket. """ if isinstance(data, six.text_type): data = data.encode('utf-8') @@ -535,8 +510,8 @@ def make_public(self, client=None): """Make this blob public giving all users read access. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to the ``connection`` stored on the blob's bucket. + :param client: Optional. The client to use. If not passed, falls back + to the ``client`` stored on the blob's bucket. """ self.acl.all().grant_read() self.acl.save(client=client) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 7bb0a9ab1303..fb138ec78949 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -21,7 +21,6 @@ import pytz import six -from gcloud._helpers import get_default_project from gcloud.exceptions import NotFound from gcloud.iterator import Iterator from gcloud.storage._helpers import _PropertyMixin @@ -100,20 +99,6 @@ def __init__(self, client, name=None): def __repr__(self): return '' % self.name - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current object. - - :rtype: :class:`gcloud.storage.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self.client - return client - def exists(self, client=None): """Determines whether or not this bucket exists. @@ -141,7 +126,7 @@ def exists(self, client=None): except NotFound: return False - def create(self, project=None, client=None): + def create(self, client=None): """Creates current bucket. If the bucket already exists, will raise @@ -149,27 +134,15 @@ def create(self, project=None, client=None): This implements "storage.buckets.insert". - :type project: string - :param project: Optional. The project to use when creating bucket. - If not provided, falls back to default. - :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :rtype: :class:`gcloud.storage.bucket.Bucket` :returns: The newly created bucket. - :raises: :class:`EnvironmentError` if the project is not given and - can't be inferred. """ client = self._require_client(client) - if project is None: - project = get_default_project() - if project is None: - raise EnvironmentError('Project could not be inferred ' - 'from environment.') - - query_params = {'project': project} + query_params = {'project': client.project} api_response = client.connection.api_request( method='POST', path='/b', query_params=query_params, data={'name': self.name}, _target_object=self) diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index df18da35e119..ca82cf1b2528 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -116,7 +116,7 @@ def create_bucket(self, bucket_name): :returns: The newly created bucket. 
""" bucket = Bucket(self, name=bucket_name) - bucket.create(self.project, client=self) + bucket.create(client=self) return bucket def list_buckets(self, max_results=None, page_token=None, prefix=None, diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index c2e72cc54213..740cfd4e2e5b 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -34,29 +34,13 @@ def path(self): return Derived - def _monkey(self, connection): - from gcloud.storage._testing import _monkey_defaults - return _monkey_defaults(connection=connection) - def test_path_is_abstract(self): mixin = self._makeOne() self.assertRaises(NotImplementedError, lambda: mixin.path) - def test_reload_w_implicit_connection(self): - connection = _Connection({'foo': 'Foo'}) - derived = self._derivedClass('/path')() - # Make sure changes is not a set, so we can observe a change. - derived._changes = object() - with self._monkey(connection): - derived.reload() - self.assertEqual(derived._properties, {'foo': 'Foo'}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/path') - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - # Make sure changes get reset by reload. - self.assertEqual(derived._changes, set()) + def test_client_is_abstract(self): + mixin = self._makeOne() + self.assertRaises(NotImplementedError, lambda: mixin.client) def test_reload_w_explicit_connection(self): connection = _Connection({'foo': 'Foo'}) @@ -86,27 +70,6 @@ def test__patch_property(self): derived._patch_property('foo', 'Foo') self.assertEqual(derived._properties, {'foo': 'Foo'}) - def test_patch_w_implicit_connection(self): - connection = _Connection({'foo': 'Foo'}) - derived = self._derivedClass('/path')() - # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {'bar': BAR, 'baz': BAZ} - derived._changes = set(['bar']) # Ignore baz. - with self._monkey(connection): - derived.patch() - self.assertEqual(derived._properties, {'foo': 'Foo'}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/path') - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - # Since changes does not include `baz`, we don't see it sent. - self.assertEqual(kw[0]['data'], {'bar': BAR}) - # Make sure changes get reset by patch(). 
- self.assertEqual(derived._changes, set()) - def test_patch_w_explicit_connection(self): connection = _Connection({'foo': 'Foo'}) client = _Client(connection) @@ -157,44 +120,6 @@ def _patch_property(self, name, value): self.assertEqual(test._patched, ('solfege', 'Latido')) -class Test__require_connection(unittest2.TestCase): - - def _callFUT(self, connection=None): - from gcloud.storage._helpers import _require_connection - return _require_connection(connection=connection) - - def _monkey(self, connection): - from gcloud.storage._testing import _monkey_defaults - return _monkey_defaults(connection=connection) - - def test_implicit_unset(self): - with self._monkey(None): - with self.assertRaises(EnvironmentError): - self._callFUT() - - def test_implicit_unset_w_existing_batch(self): - CONNECTION = object() - with self._monkey(None): - with _NoCommitBatch(connection=CONNECTION): - self.assertEqual(self._callFUT(), CONNECTION) - - def test_implicit_unset_passed_explicitly(self): - CONNECTION = object() - with self._monkey(None): - self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) - - def test_implicit_set(self): - IMPLICIT_CONNECTION = object() - with self._monkey(IMPLICIT_CONNECTION): - self.assertTrue(self._callFUT() is IMPLICIT_CONNECTION) - - def test_implicit_set_passed_explicitly(self): - IMPLICIT_CONNECTION = object() - CONNECTION = object() - with self._monkey(IMPLICIT_CONNECTION): - self.assertTrue(self._callFUT(CONNECTION) is CONNECTION) - - class Test__base64_md5hash(unittest2.TestCase): def _callFUT(self, bytes_to_sign): @@ -290,21 +215,6 @@ def b64encode(self, value): return value -class _NoCommitBatch(object): - - def __init__(self, connection): - self._connection = connection - - def __enter__(self): - from gcloud.storage.batch import _BATCHES - _BATCHES.push(self._connection) - return self._connection - - def __exit__(self, *args): - from gcloud.storage.batch import _BATCHES - _BATCHES.pop() - - class _Client(object): def __init__(self, connection): diff --git a/gcloud/storage/test__implicit_environ.py b/gcloud/storage/test__implicit_environ.py deleted file mode 100644 index 2176a7090066..000000000000 --- a/gcloud/storage/test__implicit_environ.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest2 - - -class Test_get_default_bucket(unittest2.TestCase): - - def _callFUT(self): - from gcloud.storage._implicit_environ import get_default_bucket - return get_default_bucket() - - def test_wo_override(self): - self.assertTrue(self._callFUT() is None) - - -class Test_get_default_connection(unittest2.TestCase): - - def setUp(self): - from gcloud.storage._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.storage._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self): - from gcloud.storage._implicit_environ import get_default_connection - return get_default_connection() - - def test_wo_override(self): - self.assertTrue(self._callFUT() is None) - - -class Test_get_connection(unittest2.TestCase): - - def _callFUT(self, *args, **kw): - from gcloud.storage._implicit_environ import get_connection - return get_connection(*args, **kw) - - def test_it(self): - from gcloud import credentials - from gcloud.storage import SCOPE - from gcloud.storage.connection import Connection - from gcloud.test_credentials import _Client - from gcloud._testing import _Monkey - client = _Client() - with _Monkey(credentials, client=client): - found = self._callFUT() - self.assertTrue(isinstance(found, Connection)) - self.assertTrue(found._credentials is client._signed) - self.assertEqual(found._credentials._scopes, SCOPE) - self.assertTrue(client._get_app_default_called) - - -class Test_set_default_connection(unittest2.TestCase): - - def setUp(self): - from gcloud.storage._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.storage._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self, connection=None): - from gcloud.storage._implicit_environ import set_default_connection - return set_default_connection(connection=connection) - - def test_set_explicit(self): - from gcloud.storage import _implicit_environ - - self.assertEqual(_implicit_environ.get_default_connection(), None) - fake_cnxn = object() - self._callFUT(connection=fake_cnxn) - self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn) - - def test_set_implicit(self): - from gcloud._testing import _Monkey - from gcloud.storage import _implicit_environ - - self.assertEqual(_implicit_environ.get_default_connection(), None) - - fake_cnxn = object() - _called_args = [] - _called_kwargs = [] - - def mock_get_connection(*args, **kwargs): - _called_args.append(args) - _called_kwargs.append(kwargs) - return fake_cnxn - - with _Monkey(_implicit_environ, get_connection=mock_get_connection): - self._callFUT() - - self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn) - self.assertEqual(_called_args, [()]) - self.assertEqual(_called_kwargs, [{}]) - - -class Test_lazy_loading(unittest2.TestCase): - - def setUp(self): - from gcloud.storage._testing import _setup_defaults - _setup_defaults(self, implicit=True) - - def tearDown(self): - from gcloud.storage._testing import _tear_down_defaults - _tear_down_defaults(self) - - def test_descriptor_for_connection(self): - from gcloud._testing import _Monkey - from gcloud.storage import _implicit_environ - - self.assertFalse( - 'connection' in _implicit_environ._DEFAULTS.__dict__) - - DEFAULT = object() - - with _Monkey(_implicit_environ, get_connection=lambda: DEFAULT): - lazy_loaded = _implicit_environ._DEFAULTS.connection - - self.assertEqual(lazy_loaded, DEFAULT) - self.assertTrue( - 'connection' in _implicit_environ._DEFAULTS.__dict__) 
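# Illustrative usage sketch (not part of the patch): what replaces the deleted
# implicit-environment helpers above. Only ``Bucket(client, name=...)``,
# ``bucket.create()`` reading ``client.project``, ``Blob(name, bucket)`` and
# ``upload_from_string`` are shown by this diff; the ``Client(project=...)``
# constructor argument is an assumption.
from gcloud.storage.blob import Blob
from gcloud.storage.bucket import Bucket
from gcloud.storage.client import Client

client = Client(project='my-project')      # explicit client, no module-level default
bucket = Bucket(client, name='my-bucket')  # bucket is bound to that client
bucket.create()                            # project now comes from client.project
blob = Blob('greeting.txt', bucket)        # blob resolves its client via the bucket
blob.upload_from_string('hello, world')    # no client= argument required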
diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py index d4d1607f2920..34785436fa80 100644 --- a/gcloud/storage/test_acl.py +++ b/gcloud/storage/test_acl.py @@ -148,6 +148,10 @@ def _reload(): acl._ensure_loaded() self.assertTrue(acl._really_loaded) + def test_client_is_abstract(self): + acl = self._makeOne() + self.assertRaises(NotImplementedError, lambda: acl.client) + def test_reset(self): TYPE = 'type' ID = 'id' @@ -505,23 +509,6 @@ def test_get_entities_nonempty(self): entity = acl.entity(TYPE, ID) self.assertEqual(acl.get_entities(), [entity]) - def test_reload_missing_w_implicit_connection(self): - # https://github.com/GoogleCloudPlatform/gcloud-python/issues/652 - from gcloud.storage._testing import _monkey_defaults - ROLE = 'role' - connection = _Connection({}) - acl = self._makeOne() - acl.reload_path = '/testing/acl' - acl.loaded = True - acl.entity('allUsers', ROLE) - with _monkey_defaults(connection=connection): - acl.reload() - self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_missing_w_explicit_connection(self): # https://github.com/GoogleCloudPlatform/gcloud-python/issues/652 ROLE = 'role' @@ -538,23 +525,6 @@ def test_reload_missing_w_explicit_connection(self): self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_empty_result_clears_local_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - ROLE = 'role' - connection = _Connection({'items': []}) - acl = self._makeOne() - acl.reload_path = '/testing/acl' - acl.loaded = True - acl.entity('allUsers', ROLE) - with _monkey_defaults(connection=connection): - acl.reload() - self.assertTrue(acl.loaded) - self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_empty_result_clears_local_w_explicit_connection(self): ROLE = 'role' connection = _Connection({'items': []}) @@ -571,23 +541,6 @@ def test_reload_empty_result_clears_local_w_explicit_connection(self): self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_nonempty_result_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - ROLE = 'role' - connection = _Connection( - {'items': [{'entity': 'allUsers', 'role': ROLE}]}) - acl = self._makeOne() - acl.reload_path = '/testing/acl' - acl.loaded = True - with _monkey_defaults(connection=connection): - acl.reload() - self.assertTrue(acl.loaded) - self.assertEqual(list(acl), [{'entity': 'allUsers', 'role': ROLE}]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/testing/acl') - def test_reload_nonempty_result_w_explicit_connection(self): ROLE = 'role' connection = _Connection( @@ -604,17 +557,6 @@ def test_reload_nonempty_result_w_explicit_connection(self): self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/testing/acl') - def test_save_none_set_none_passed_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - connection = _Connection() - acl = self._makeOne() - acl._connection = connection - acl.save_path = '/testing' - with _monkey_defaults(connection=connection): - acl.save() - kw = connection._requested - 
self.assertEqual(len(kw), 0) - def test_save_none_set_none_passed_w_explicit_connection(self): connection = _Connection() client = _Client(connection) @@ -624,22 +566,6 @@ def test_save_none_set_none_passed_w_explicit_connection(self): kw = connection._requested self.assertEqual(len(kw), 0) - def test_save_existing_missing_none_passed_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - connection = _Connection({}) - acl = self._makeOne() - acl.save_path = '/testing' - acl.loaded = True - with _monkey_defaults(connection=connection): - acl.save() - self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0]['data'], {'acl': []}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_save_existing_missing_none_passed_w_explicit_connection(self): connection = _Connection({}) client = _Client(connection) @@ -655,25 +581,6 @@ def test_save_existing_missing_none_passed_w_explicit_connection(self): self.assertEqual(kw[0]['data'], {'acl': []}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_save_no_arg_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - ROLE = 'role' - AFTER = [{'entity': 'allUsers', 'role': ROLE}] - connection = _Connection({'acl': AFTER}) - acl = self._makeOne() - acl.save_path = '/testing' - acl.loaded = True - acl.entity('allUsers').grant(ROLE) - with _monkey_defaults(connection=connection): - acl.save() - self.assertEqual(list(acl), AFTER) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0]['data'], {'acl': AFTER}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_save_no_arg_w_explicit_connection(self): ROLE = 'role' AFTER = [{'entity': 'allUsers', 'role': ROLE}] @@ -692,29 +599,6 @@ def test_save_no_arg_w_explicit_connection(self): self.assertEqual(kw[0]['data'], {'acl': AFTER}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_save_w_arg_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - ROLE1 = 'role1' - ROLE2 = 'role2' - STICKY = {'entity': 'allUsers', 'role': ROLE2} - new_acl = [{'entity': 'allUsers', 'role': ROLE1}] - connection = _Connection({'acl': [STICKY] + new_acl}) - acl = self._makeOne() - acl.save_path = '/testing' - acl.loaded = True - with _monkey_defaults(connection=connection): - acl.save(new_acl) - entries = list(acl) - self.assertEqual(len(entries), 2) - self.assertTrue(STICKY in entries) - self.assertTrue(new_acl[0] in entries) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0]['data'], {'acl': new_acl}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_save_w_arg_w_explicit_connection(self): ROLE1 = 'role1' ROLE2 = 'role2' @@ -737,26 +621,6 @@ def test_save_w_arg_w_explicit_connection(self): self.assertEqual(kw[0]['data'], {'acl': new_acl}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_clear_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - ROLE1 = 'role1' - ROLE2 = 'role2' - STICKY = {'entity': 'allUsers', 'role': ROLE2} - connection = _Connection({'acl': [STICKY]}) - acl 
= self._makeOne() - acl.save_path = '/testing' - acl.loaded = True - acl.entity('allUsers', ROLE1) - with _monkey_defaults(connection=connection): - acl.clear() - self.assertEqual(list(acl), [STICKY]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/testing') - self.assertEqual(kw[0]['data'], {'acl': []}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_clear_w_explicit_connection(self): ROLE1 = 'role1' ROLE2 = 'role2' diff --git a/gcloud/storage/test_batch.py b/gcloud/storage/test_batch.py index d9b332dbf776..41a989727104 100644 --- a/gcloud/storage/test_batch.py +++ b/gcloud/storage/test_batch.py @@ -68,14 +68,6 @@ def test_ctor_body_dict(self): class TestBatch(unittest2.TestCase): - def setUp(self): - from gcloud.storage._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.storage._testing import _tear_down_defaults - _tear_down_defaults(self) - def _getTargetClass(self): from gcloud.storage.batch import Batch return Batch @@ -362,6 +354,16 @@ def test_finish_nonempty_non_multipart_response(self): batch._requests.append(('DELETE', URL, {}, None)) self.assertRaises(ValueError, batch.finish) + def test_current(self): + from gcloud.storage.batch import _BATCHES + klass = self._getTargetClass() + batch_top = object() + self.assertEqual(list(_BATCHES), []) + _BATCHES.push(batch_top) + self.assertTrue(klass.current() is batch_top) + _BATCHES.pop() + self.assertEqual(list(_BATCHES), []) + def test_as_context_mgr_wo_error(self): from gcloud.storage.batch import _BATCHES URL = 'http://example.com/api' diff --git a/gcloud/storage/test_blob.py b/gcloud/storage/test_blob.py index 96200fd47734..babbb036fe60 100644 --- a/gcloud/storage/test_blob.py +++ b/gcloud/storage/test_blob.py @@ -24,37 +24,7 @@ def _makeOne(self, *args, **kw): blob._properties = properties or {} return blob - def test_ctor_no_bucket(self): - self.assertRaises(ValueError, self._makeOne, None) - - def test_ctor_implicit_bucket(self): - from gcloud._testing import _Monkey - from gcloud.storage import _implicit_environ - - FAKE_BUCKET = _Bucket() - - def mock_get_bucket(): - return FAKE_BUCKET - - with _Monkey(_implicit_environ, get_default_bucket=mock_get_bucket): - blob = self._makeOne(None) - - self.assertEqual(blob.bucket, FAKE_BUCKET) - self.assertEqual(blob.name, None) - self.assertEqual(blob._properties, {}) - self.assertFalse(blob._acl.loaded) - self.assertTrue(blob._acl.blob is blob) - - def test_ctor_defaults(self): - FAKE_BUCKET = _Bucket() - blob = self._makeOne(None, bucket=FAKE_BUCKET) - self.assertEqual(blob.bucket, FAKE_BUCKET) - self.assertEqual(blob.name, None) - self.assertEqual(blob._properties, {}) - self.assertFalse(blob._acl.loaded) - self.assertTrue(blob._acl.blob is blob) - - def test_ctor_explicit(self): + def test_ctor(self): BLOB_NAME = 'blob-name' bucket = _Bucket() properties = {'key': 'value'} @@ -110,8 +80,9 @@ def test_acl_property(self): def test_path_no_bucket(self): FAKE_BUCKET = object() - blob = self._makeOne(None, bucket=FAKE_BUCKET) - self.assertRaises(ValueError, getattr, blob, 'path') + NAME = 'blob-name' + blob = self._makeOne(NAME, bucket=FAKE_BUCKET) + self.assertRaises(AttributeError, getattr, blob, 'path') def test_path_no_name(self): bucket = _Bucket() @@ -146,8 +117,7 @@ def test_public_url_w_slash_in_name(self): blob.public_url, 'https://storage.googleapis.com/name/parent%2Fchild') - def 
_basic_generate_signed_url_helper(self, credentials=None, - use_client=True): + def _basic_generate_signed_url_helper(self, credentials=None): from gcloud._testing import _Monkey from gcloud.storage import blob as MUT @@ -155,20 +125,15 @@ def _basic_generate_signed_url_helper(self, credentials=None, EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' '&Expiration=2014-10-16T20:34:37.000Z') SIGNER = _Signer() with _Monkey(MUT, generate_signed_url=SIGNER): - if use_client: - signed_uri = blob.generate_signed_url(EXPIRATION, - client=client, - credentials=credentials) - else: - signed_uri = blob.generate_signed_url(EXPIRATION, - credentials=credentials) + signed_uri = blob.generate_signed_url(EXPIRATION, + credentials=credentials) self.assertEqual(signed_uri, URI) PATH = '/name/%s' % (BLOB_NAME,) @@ -184,10 +149,6 @@ def _basic_generate_signed_url_helper(self, credentials=None, } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) - def test_generate_signed_url_w_no_creds(self): - with self.assertRaises(ValueError): - self._basic_generate_signed_url_helper(use_client=False) - def test_generate_signed_url_w_default_method(self): self._basic_generate_signed_url_helper() @@ -203,15 +164,14 @@ def test_generate_signed_url_w_slash_in_name(self): EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' '&Expiration=2014-10-16T20:34:37.000Z') SIGNER = _Signer() with _Monkey(MUT, generate_signed_url=SIGNER): - signed_url = blob.generate_signed_url(EXPIRATION, - client=client) + signed_url = blob.generate_signed_url(EXPIRATION) self.assertEqual(signed_url, URI) EXPECTED_ARGS = (_Connection.credentials,) @@ -231,15 +191,14 @@ def test_generate_signed_url_w_explicit_method(self): EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' '&Expiration=2014-10-16T20:34:37.000Z') SIGNER = _Signer() with _Monkey(MUT, generate_signed_url=SIGNER): - signed_uri = blob.generate_signed_url(EXPIRATION, method='POST', - client=client) + signed_uri = blob.generate_signed_url(EXPIRATION, method='POST') self.assertEqual(signed_uri, URI) PATH = '/name/%s' % (BLOB_NAME,) @@ -258,20 +217,9 @@ def test_exists_miss(self): not_found_response = {'status': NOT_FOUND} connection = _Connection(not_found_response) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(NONESUCH, bucket=bucket) - self.assertFalse(blob.exists(client=client)) - - def test_exists_implicit(self): - from gcloud.storage._testing import _monkey_defaults - from six.moves.http_client import NOT_FOUND - NONESUCH = 'nonesuch' - not_found_response = {'status': NOT_FOUND} - connection = _Connection(not_found_response) - bucket = _Bucket() - blob = self._makeOne(NONESUCH, bucket=bucket) - with _monkey_defaults(connection=connection): - self.assertFalse(blob.exists()) + self.assertFalse(blob.exists()) def test_exists_hit(self): from six.moves.http_client import OK @@ -279,54 +227,39 @@ def 
test_exists_hit(self): found_response = {'status': OK} connection = _Connection(found_response) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists(client=client)) + self.assertTrue(blob.exists()) def test_rename(self): BLOB_NAME = 'blob-name' NEW_NAME = 'new-name' connection = _Connection() client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client=client) blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - new_blob = blob.rename(NEW_NAME, client=client) + new_blob = blob.rename(NEW_NAME) self.assertEqual(blob.name, BLOB_NAME) self.assertEqual(new_blob.name, NEW_NAME) self.assertFalse(BLOB_NAME in bucket._blobs) - self.assertEqual(bucket._deleted, [(BLOB_NAME, client)]) + self.assertEqual(bucket._deleted, [(BLOB_NAME, None)]) self.assertTrue(NEW_NAME in bucket._blobs) - def test_delete_w_implicit_connection(self): + def test_delete(self): from six.moves.http_client import NOT_FOUND - from gcloud.storage._testing import _monkey_defaults BLOB_NAME = 'blob-name' not_found_response = {'status': NOT_FOUND} connection = _Connection(not_found_response) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - with _monkey_defaults(connection=connection): - blob.delete() - self.assertFalse(blob.exists(client=client)) + blob.delete() + self.assertFalse(blob.exists()) self.assertEqual(bucket._deleted, [(BLOB_NAME, None)]) - def test_delete_w_explicit_connection(self): - from six.moves.http_client import NOT_FOUND - BLOB_NAME = 'blob-name' - not_found_response = {'status': NOT_FOUND} - connection = _Connection(not_found_response) - client = _Client(connection) - bucket = _Bucket() - blob = self._makeOne(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 - blob.delete(client=client) - self.assertFalse(blob.exists(client=client)) - self.assertEqual(bucket._deleted, [(BLOB_NAME, client)]) - def _download_to_file_helper(self, chunk_size=None): from six.moves.http_client import OK from six.moves.http_client import PARTIAL_CONTENT @@ -341,7 +274,7 @@ def _download_to_file_helper(self, chunk_size=None): (chunk2_response, b'def'), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK} blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) @@ -349,7 +282,7 @@ def _download_to_file_helper(self, chunk_size=None): blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = chunk_size fh = BytesIO() - blob.download_to_file(fh, client=client) + blob.download_to_file(fh) self.assertEqual(fh.getvalue(), b'abcdef') def test_download_to_file_default(self): @@ -374,7 +307,7 @@ def test_download_to_filename(self): (chunk2_response, b'def'), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK, 'updated': '2014-12-06T13:13:50.690Z'} @@ -382,7 +315,7 @@ def test_download_to_filename(self): blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 with NamedTemporaryFile() as f: - blob.download_to_filename(f.name, client=client) + blob.download_to_filename(f.name) f.flush() with open(f.name, 'rb') as g: wrote = g.read() @@ -404,24 +337,24 @@ def test_download_as_string(self): (chunk2_response, b'def'), ) client = _Client(connection) - bucket = 
_Bucket() + bucket = _Bucket(client) MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK} blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 - fetched = blob.download_as_string(client=client) + fetched = blob.download_as_string() self.assertEqual(fetched, b'abcdef') def test_upload_from_file_size_failure(self): BLOB_NAME = 'blob-name' - bucket = _Bucket() - blob = self._makeOne(BLOB_NAME, bucket=bucket) - file_obj = object() connection = _Connection() client = _Client(connection) + bucket = _Bucket(client) + blob = self._makeOne(BLOB_NAME, bucket=bucket) + file_obj = object() with self.assertRaises(ValueError): - blob.upload_from_file(file_obj, size=None, client=client) + blob.upload_from_file(file_obj, size=None) def _upload_from_file_simple_test_helper(self, properties=None, content_type_arg=None, @@ -437,7 +370,7 @@ def _upload_from_file_simple_test_helper(self, properties=None, (response, b'{}'), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 @@ -445,8 +378,7 @@ def _upload_from_file_simple_test_helper(self, properties=None, fh.write(DATA) fh.flush() blob.upload_from_file(fh, rewind=True, - content_type=content_type_arg, - client=client) + content_type=content_type_arg) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -508,7 +440,7 @@ def test_upload_from_file_resumable(self): (chunk2_response, b'{}'), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 @@ -517,7 +449,7 @@ def test_upload_from_file_resumable(self): with NamedTemporaryFile() as fh: fh.write(DATA) fh.flush() - blob.upload_from_file(fh, rewind=True, client=client) + blob.upload_from_file(fh, rewind=True) rq = connection.http._requested self.assertEqual(len(rq), 3) self.assertEqual(rq[0]['method'], 'POST') @@ -567,14 +499,14 @@ def test_upload_from_file_w_slash_in_name(self): (chunk2_response, ''), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 with NamedTemporaryFile() as fh: fh.write(DATA) fh.flush() - blob.upload_from_file(fh, rewind=True, client=client) + blob.upload_from_file(fh, rewind=True) self.assertEqual(fh.tell(), len(DATA)) rq = connection.http._requested self.assertEqual(len(rq), 1) @@ -615,7 +547,7 @@ def _upload_from_filename_test_helper(self, properties=None, (chunk2_response, ''), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -623,8 +555,7 @@ def _upload_from_filename_test_helper(self, properties=None, with NamedTemporaryFile(suffix='.jpeg') as fh: fh.write(DATA) fh.flush() - blob.upload_from_filename(fh.name, content_type=content_type_arg, - client=client) + blob.upload_from_filename(fh.name, content_type=content_type_arg) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -682,11 +613,11 @@ def test_upload_from_string_w_bytes(self): (chunk2_response, ''), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client) blob = self._makeOne(BLOB_NAME, 
bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 - blob.upload_from_string(DATA, client=client) + blob.upload_from_string(DATA) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -722,11 +653,11 @@ def test_upload_from_string_w_text(self): (chunk2_response, ''), ) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client=client) blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 - blob.upload_from_string(DATA, client=client) + blob.upload_from_string(DATA) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -743,26 +674,6 @@ def test_upload_from_string_w_text(self): self.assertEqual(headers['Content-Type'], 'text/plain') self.assertEqual(rq[0]['body'], ENCODED) - def test_make_public_w_implicit_ocnnection(self): - from gcloud.storage.acl import _ACLEntity - from gcloud.storage._testing import _monkey_defaults - BLOB_NAME = 'blob-name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = {'acl': permissive} - connection = _Connection(after) - bucket = _Bucket() - blob = self._makeOne(BLOB_NAME, bucket=bucket) - blob.acl.loaded = True - with _monkey_defaults(connection=connection): - blob.make_public() - self.assertEqual(list(blob.acl), permissive) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % BLOB_NAME) - self.assertEqual(kw[0]['data'], {'acl': permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_make_public_w_explicit_connection(self): from gcloud.storage.acl import _ACLEntity BLOB_NAME = 'blob-name' @@ -770,10 +681,10 @@ def test_make_public_w_explicit_connection(self): after = {'acl': permissive} connection = _Connection(after) client = _Client(connection) - bucket = _Bucket() + bucket = _Bucket(client=client) blob = self._makeOne(BLOB_NAME, bucket=bucket) blob.acl.loaded = True - blob.make_public(client=client) + blob.make_public() self.assertEqual(list(blob.acl), permissive) kw = connection._requested self.assertEqual(len(kw), 1) @@ -1138,7 +1049,11 @@ class _Bucket(object): path = '/b/name' name = 'name' - def __init__(self): + def __init__(self, client=None): + if client is None: + connection = _Connection() + client = _Client(connection) + self.client = client self._blobs = {} self._copied = [] self._deleted = [] diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index fcfacf9bde13..8f070d7026ac 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -98,16 +98,7 @@ def _makeOne(self, client=None, name=None, properties=None): bucket._properties = properties or {} return bucket - def test_ctor_defaults(self): - bucket = self._makeOne() - self.assertEqual(bucket.name, None) - self.assertEqual(bucket._properties, {}) - self.assertFalse(bucket._acl.loaded) - self.assertTrue(bucket._acl.bucket is bucket) - self.assertFalse(bucket._default_object_acl.loaded) - self.assertTrue(bucket._default_object_acl.bucket is bucket) - - def test_ctor_explicit(self): + def test_ctor(self): NAME = 'name' properties = {'key': 'value'} bucket = self._makeOne(name=NAME, properties=properties) @@ -171,41 +162,14 @@ def api_request(cls, *args, **kwargs): expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) - def test_create_no_project(self): - from 
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index fcfacf9bde13..8f070d7026ac 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -98,16 +98,7 @@ def _makeOne(self, client=None, name=None, properties=None):
         bucket._properties = properties or {}
         return bucket
 
-    def test_ctor_defaults(self):
-        bucket = self._makeOne()
-        self.assertEqual(bucket.name, None)
-        self.assertEqual(bucket._properties, {})
-        self.assertFalse(bucket._acl.loaded)
-        self.assertTrue(bucket._acl.bucket is bucket)
-        self.assertFalse(bucket._default_object_acl.loaded)
-        self.assertTrue(bucket._default_object_acl.bucket is bucket)
-
-    def test_ctor_explicit(self):
+    def test_ctor(self):
         NAME = 'name'
         properties = {'key': 'value'}
         bucket = self._makeOne(name=NAME, properties=properties)
@@ -171,41 +162,14 @@ def api_request(cls, *args, **kwargs):
         expected_cw = [((), expected_called_kwargs)]
         self.assertEqual(_FakeConnection._called_with, expected_cw)
 
-    def test_create_no_project(self):
-        from gcloud._testing import _monkey_defaults
-        BUCKET_NAME = 'bucket-name'
-        bucket = self._makeOne(name=BUCKET_NAME)
-        connection = _Connection()
-        client = _Client(connection)
-        with _monkey_defaults(project=None):
-            self.assertRaises(EnvironmentError, bucket.create,
-                              client=client)
-
     def test_create_hit_explicit_project(self):
         BUCKET_NAME = 'bucket-name'
         DATA = {'name': BUCKET_NAME}
         connection = _Connection(DATA)
-        client = _Client(connection)
-        PROJECT = 'PROJECT'
-        bucket = self._makeOne(name=BUCKET_NAME)
-        bucket.create(PROJECT, client=client)
-
-        kw, = connection._requested
-        self.assertEqual(kw['method'], 'POST')
-        self.assertEqual(kw['path'], '/b')
-        self.assertEqual(kw['query_params'], {'project': PROJECT})
-        self.assertEqual(kw['data'], DATA)
-
-    def test_create_hit_implicit_project(self):
-        from gcloud._testing import _monkey_defaults
-        BUCKET_NAME = 'bucket-name'
-        DATA = {'name': BUCKET_NAME}
-        connection = _Connection(DATA)
-        client = _Client(connection)
         PROJECT = 'PROJECT'
-        bucket = self._makeOne(name=BUCKET_NAME)
-        with _monkey_defaults(project=PROJECT):
-            bucket.create(client=client)
+        client = _Client(connection, project=PROJECT)
+        bucket = self._makeOne(client=client, name=BUCKET_NAME)
+        bucket.create()
 
         kw, = connection._requested
         self.assertEqual(kw['method'], 'POST')
@@ -266,7 +230,7 @@ def test_list_blobs_defaults(self):
         connection = _Connection({'items': []})
         client = _Client(connection)
         bucket = self._makeOne(client=client, name=NAME)
-        iterator = bucket.list_blobs(client=client)
+        iterator = bucket.list_blobs()
         blobs = list(iterator)
         self.assertEqual(blobs, [])
         kw, = connection._requested
@@ -345,8 +309,8 @@ def test_delete_explicit_hit(self):
         connection = _Connection(GET_BLOBS_RESP)
         connection._delete_bucket = True
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        result = bucket.delete(force=True, client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        result = bucket.delete(force=True)
         self.assertTrue(result is None)
         expected_cw = [{
             'method': 'DELETE',
@@ -370,8 +334,8 @@ def test_delete_explicit_force_delete_blobs(self):
                                  DELETE_BLOB2_RESP)
         connection._delete_bucket = True
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        result = bucket.delete(force=True, client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        result = bucket.delete(force=True)
         self.assertTrue(result is None)
         expected_cw = [{
             'method': 'DELETE',
@@ -388,8 +352,8 @@ def test_delete_explicit_force_miss_blobs(self):
         connection = _Connection(GET_BLOBS_RESP)
         connection._delete_bucket = True
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        result = bucket.delete(force=True, client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        result = bucket.delete(force=True)
         self.assertTrue(result is None)
         expected_cw = [{
             'method': 'DELETE',
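
With the implicit-project tests deleted, ``create()`` is exercised only through a client that already knows its project. A rough, hypothetical sketch of the flow the surviving test pins down (stand-in classes; the real method signature may differ):

    class FakeConnection(object):
        def __init__(self):
            self._requested = []

        def api_request(self, **kwargs):
            self._requested.append(kwargs)
            return {}


    class Client(object):
        def __init__(self, connection, project=None):
            self.connection = connection
            self.project = project


    class Bucket(object):
        def __init__(self, client, name):
            self.client = client
            self.name = name

        def create(self, project=None, client=None):
            # Explicit arguments win; otherwise use the bound client and its
            # project rather than any process-wide default.
            client = client if client is not None else self.client
            project = project if project is not None else client.project
            if project is None:
                raise EnvironmentError('Project could not be inferred.')
            return client.connection.api_request(
                method='POST', path='/b',
                query_params={'project': project}, data={'name': self.name})


    client = Client(FakeConnection(), project='PROJECT')
    Bucket(client, 'bucket-name').create()
    assert client.connection._requested[0]['query_params'] == {
        'project': 'PROJECT'}
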
@@ -411,12 +375,11 @@ def test_delete_explicit_too_many(self):
         connection = _Connection(GET_BLOBS_RESP)
         connection._delete_bucket = True
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
+        bucket = self._makeOne(client=client, name=NAME)
         # Make the Bucket refuse to delete with 2 objects.
         bucket._MAX_OBJECTS_FOR_ITERATION = 1
-        self.assertRaises(ValueError, bucket.delete, force=True,
-                          client=client)
+        self.assertRaises(ValueError, bucket.delete, force=True)
         self.assertEqual(connection._deleted_buckets, [])
 
     def test_delete_blob_miss(self):
@@ -425,9 +388,8 @@ def test_delete_blob_miss(self):
         NAME = 'name'
         NONESUCH = 'nonesuch'
         connection = _Connection()
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        self.assertRaises(NotFound, bucket.delete_blob, NONESUCH,
-                          client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        self.assertRaises(NotFound, bucket.delete_blob, NONESUCH)
         kw, = connection._requested
         self.assertEqual(kw['method'], 'DELETE')
         self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH))
@@ -437,8 +399,8 @@ def test_delete_blob_hit(self):
         NAME = 'name'
         BLOB_NAME = 'blob-name'
         connection = _Connection({})
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        result = bucket.delete_blob(BLOB_NAME, client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        result = bucket.delete_blob(BLOB_NAME)
         self.assertTrue(result is None)
         kw, = connection._requested
         self.assertEqual(kw['method'], 'DELETE')
@@ -448,8 +410,8 @@ def test_delete_blobs_empty(self):
         NAME = 'name'
         connection = _Connection()
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        bucket.delete_blobs([], client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        bucket.delete_blobs([])
         self.assertEqual(connection._requested, [])
 
     def test_delete_blobs_hit(self):
@@ -457,8 +419,8 @@ def test_delete_blobs_hit(self):
         NAME = 'name'
         BLOB_NAME = 'blob-name'
         connection = _Connection({})
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        bucket.delete_blobs([BLOB_NAME], client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        bucket.delete_blobs([BLOB_NAME])
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'DELETE')
@@ -471,9 +433,8 @@ def test_delete_blobs_miss_no_on_error(self):
         NONESUCH = 'nonesuch'
         connection = _Connection({})
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
-        self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH],
-                          client=client)
+        bucket = self._makeOne(client=client, name=NAME)
+        self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH])
         kw = connection._requested
         self.assertEqual(len(kw), 2)
         self.assertEqual(kw[0]['method'], 'DELETE')
@@ -487,10 +448,9 @@ def test_delete_blobs_miss_w_on_error(self):
         NONESUCH = 'nonesuch'
         connection = _Connection({})
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
+        bucket = self._makeOne(client=client, name=NAME)
         errors = []
-        bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append,
-                            client=client)
+        bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append)
         self.assertEqual(errors, [NONESUCH])
         kw = connection._requested
         self.assertEqual(len(kw), 2)
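
The two ``delete_blobs`` miss tests above fix the error contract: with no callback the ``NotFound`` propagates, while passing ``errors.append`` collects the missing name instead. A condensed sketch of that behavior (not the library implementation):

    class NotFound(Exception):
        """Stand-in for the storage NotFound error."""


    def delete_blobs(bucket, blob_names, on_error=None):
        # Delete each blob; a missing blob either raises or is reported.
        for name in blob_names:
            try:
                bucket.delete_blob(name)
            except NotFound:
                if on_error is not None:
                    on_error(name)
                else:
                    raise


    class _OneBlobBucket(object):
        def delete_blob(self, name):
            if name != 'blob-name':
                raise NotFound(name)


    errors = []
    delete_blobs(_OneBlobBucket(), ['blob-name', 'nonesuch'], errors.append)
    assert errors == ['nonesuch']
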
@@ -510,10 +470,10 @@ class _Blob(object):
         connection = _Connection({})
         client = _Client(connection)
-        source = self._makeOne(name=SOURCE)
-        dest = self._makeOne(name=DEST)
+        source = self._makeOne(client=client, name=SOURCE)
+        dest = self._makeOne(client=client, name=DEST)
         blob = _Blob()
-        new_blob = source.copy_blob(blob, dest, client=client)
+        new_blob = source.copy_blob(blob, dest)
         self.assertTrue(new_blob.bucket is dest)
         self.assertEqual(new_blob.name, BLOB_NAME)
         kw, = connection._requested
@@ -534,11 +494,10 @@ class _Blob(object):
         connection = _Connection({})
         client = _Client(connection)
-        source = self._makeOne(name=SOURCE)
-        dest = self._makeOne(name=DEST)
+        source = self._makeOne(client=client, name=SOURCE)
+        dest = self._makeOne(client=client, name=DEST)
         blob = _Blob()
-        new_blob = source.copy_blob(blob, dest, NEW_NAME,
-                                    client=client)
+        new_blob = source.copy_blob(blob, dest, NEW_NAME)
         self.assertTrue(new_blob.bucket is dest)
         self.assertEqual(new_blob.name, NEW_NAME)
         kw, = connection._requested
@@ -879,7 +838,7 @@ def test_make_public_defaults(self):
         bucket = self._makeOne(client=client, name=NAME)
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = True
-        bucket.make_public(client=client)
+        bucket.make_public()
         self.assertEqual(list(bucket.acl), permissive)
         self.assertEqual(list(bucket.default_object_acl), [])
         kw = connection._requested
@@ -907,7 +866,7 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True):
         bucket = self._makeOne(client=client, name=NAME)
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = default_object_acl_loaded
-        bucket.make_public(future=True, client=client)
+        bucket.make_public(future=True)
         self.assertEqual(list(bucket.acl), permissive)
         self.assertEqual(list(bucket.default_object_acl), permissive)
         kw = connection._requested
@@ -973,10 +932,10 @@ def get_items_from_response(self, response):
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = True
         bucket._iterator_class = _Iterator
-        bucket.make_public(recursive=True, client=client)
+        bucket.make_public(recursive=True)
         self.assertEqual(list(bucket.acl), permissive)
         self.assertEqual(list(bucket.default_object_acl), [])
-        self.assertEqual(_saved, [(bucket, BLOB_NAME, True, client)])
+        self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)])
         kw = connection._requested
         self.assertEqual(len(kw), 2)
         self.assertEqual(kw[0]['method'], 'PATCH')
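
In the recursive case above, no client is threaded down to the blobs any more (hence the ``None`` in the saved tuple), and the bucket still refuses to iterate past a small cap, as the ``test_make_public_recursive_too_many`` hunk just below shows. A sketch of that guard, using a hypothetical helper name:

    def _blobs_for_recursive_acl(bucket, max_objects):
        # Materialize the blob list only if it stays under the cap;
        # otherwise refuse, mirroring the ValueError the test expects.
        blobs = list(bucket.list_blobs())
        if len(blobs) > max_objects:
            raise ValueError(
                'Refusing to fetch more than %d blobs' % (max_objects,))
        return blobs


    class _TwoBlobBucket(object):
        def list_blobs(self):
            return iter(['blob-name1', 'blob-name2'])


    try:
        _blobs_for_recursive_acl(_TwoBlobBucket(), max_objects=1)
    except ValueError:
        pass  # expected: two blobs exceed a cap of 1
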
@@ -1006,14 +965,13 @@ def test_make_public_recursive_too_many(self):
         }
         connection = _Connection(AFTER, GET_BLOBS_RESP)
         client = _Client(connection)
-        bucket = self._makeOne(name=NAME)
+        bucket = self._makeOne(client=client, name=NAME)
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = True
         # Make the Bucket refuse to make_public with 2 objects.
         bucket._MAX_OBJECTS_FOR_ITERATION = 1
-        self.assertRaises(ValueError, bucket.make_public, recursive=True,
-                          client=client)
+        self.assertRaises(ValueError, bucket.make_public, recursive=True)
 
 
 class _Connection(object):
@@ -1056,9 +1014,8 @@ class _Bucket(object):
     path = '/b/name'
     name = 'name'
 
-    def __init__(self):
-        connection = _Connection()
-        self.client = _Client(connection)
+    def __init__(self, client=None):
+        self.client = client
 
 
 class MockFile(io.StringIO):
@@ -1071,5 +1028,6 @@ def __init__(self, name, buffer_=None):
 
 class _Client(object):
 
-    def __init__(self, connection):
+    def __init__(self, connection, project=None):
         self.connection = connection
+        self.project = project
diff --git a/gcloud/test_credentials.py b/gcloud/test_credentials.py
index 25760c861198..b76211a7bc18 100644
--- a/gcloud/test_credentials.py
+++ b/gcloud/test_credentials.py
@@ -60,21 +60,43 @@ def _teardown_appengine_import(test_case):
     sys.modules['google'] = test_case._PREV_GOOGLE_MODULE
 
 
-class TestCredentials(unittest2.TestCase):
+class Test_get_credentials(unittest2.TestCase):
 
-    def test_get_for_service_account_p12_wo_scope(self):
-        from tempfile import NamedTemporaryFile
+    def _callFUT(self):
         from gcloud import credentials
+        return credentials.get_credentials()
+
+    def test_it(self):
+        from gcloud._testing import _Monkey
+        from gcloud import credentials as MUT
+
+        client = _Client()
+        with _Monkey(MUT, client=client):
+            found = self._callFUT()
+        self.assertTrue(isinstance(found, _Credentials))
+        self.assertTrue(found is client._signed)
+        self.assertTrue(client._get_app_default_called)
+
+
+class Test_get_for_service_account_p12(unittest2.TestCase):
+
+    def _callFUT(self, client_email, private_key_path, scope=None):
+        from gcloud.credentials import get_for_service_account_p12
+        return get_for_service_account_p12(client_email, private_key_path,
+                                           scope=scope)
+
+    def test_it(self):
+        from tempfile import NamedTemporaryFile
+        from gcloud import credentials as MUT
         from gcloud._testing import _Monkey
         CLIENT_EMAIL = 'phred@example.com'
         PRIVATE_KEY = b'SEEkR1t'
         client = _Client()
-        with _Monkey(credentials, client=client):
+        with _Monkey(MUT, client=client):
             with NamedTemporaryFile() as file_obj:
                 file_obj.write(PRIVATE_KEY)
                 file_obj.flush()
-                found = credentials.get_for_service_account_p12(
-                    CLIENT_EMAIL, file_obj.name)
+                found = self._callFUT(CLIENT_EMAIL, file_obj.name)
         self.assertTrue(found is client._signed)
         expected_called_with = {
             'service_account_name': CLIENT_EMAIL,
@@ -83,20 +105,19 @@ def test_get_for_service_account_p12_wo_scope(self):
         }
         self.assertEqual(client._called_with, expected_called_with)
 
-    def test_get_for_service_account_p12_w_scope(self):
+    def test_it_with_scope(self):
         from tempfile import NamedTemporaryFile
-        from gcloud import credentials
+        from gcloud import credentials as MUT
         from gcloud._testing import _Monkey
         CLIENT_EMAIL = 'phred@example.com'
         PRIVATE_KEY = b'SEEkR1t'
         SCOPE = 'SCOPE'
         client = _Client()
-        with _Monkey(credentials, client=client):
+        with _Monkey(MUT, client=client):
             with NamedTemporaryFile() as file_obj:
                 file_obj.write(PRIVATE_KEY)
                 file_obj.flush()
-                found = credentials.get_for_service_account_p12(
-                    CLIENT_EMAIL, file_obj.name, SCOPE)
+                found = self._callFUT(CLIENT_EMAIL, file_obj.name, SCOPE)
         self.assertTrue(found is client._signed)
         expected_called_with = {
             'service_account_name': CLIENT_EMAIL,
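
The credentials tests are reorganized around two small idioms: a per-class ``_callFUT`` helper that isolates the function under test, and monkey-patching the module under test (``MUT``) for the duration of the call. A generic sketch of such a patcher, shown only as an illustration rather than the ``gcloud._testing._Monkey`` implementation:

    import contextlib


    @contextlib.contextmanager
    def monkey(module, **replacements):
        # Swap module attributes for the duration of a test, then restore.
        originals = dict((name, getattr(module, name))
                         for name in replacements)
        try:
            for name, value in replacements.items():
                setattr(module, name, value)
            yield
        finally:
            for name, value in originals.items():
                setattr(module, name, value)


    # Typical use, mirroring the tests above:
    #     with monkey(MUT, client=fake_client):
    #         found = self._callFUT()
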
@@ -544,9 +565,6 @@ def create_scoped(self, scopes):
         self._scopes = scopes
         return self
 
-    def create_scoped_required(self):
-        return True
-
 
 class _Client(object):
 
     def __init__(self):