diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index 9f724045a82b..ee5311f29c91 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -4,35 +4,23 @@ """ -class _MetadataMixin(object): - """Abstract mixin for cloud storage classes with associated metadata. +class _PropertyMixin(object): + """Abstract mixin for cloud storage classes with associated properties. Non-abstract subclasses should implement: - - CUSTOM_METADATA_FIELDS + - CUSTOM_PROPERTY_ACCESSORS - connection - path """ - CUSTOM_METADATA_FIELDS = None + CUSTOM_PROPERTY_ACCESSORS = None """Mapping of field name -> accessor for fields w/ custom accessors. Expected to be set by subclasses. Fields in this mapping will cause - `get_metadata()` to raise a KeyError with a message to use the relevant - accessor methods. + :meth:`_get_property()` to raise a KeyError with a message to use the + relevant accessor methods. """ - def __init__(self, name=None, metadata=None): - """_MetadataMixin constructor. - - :type name: string - :param name: The name of the object. - - :type metadata: dict - :param metadata: All the other data provided by Cloud Storage. - """ - self.name = name - self.metadata = metadata - @property def connection(self): """Abstract getter for the connection to use.""" @@ -43,90 +31,112 @@ def path(self): """Abstract getter for the object path.""" raise NotImplementedError - def has_metadata(self, field=None): - """Check if metadata is available. + def __init__(self, name=None, properties=None): + """_PropertyMixin constructor. - :type field: string - :param field: (optional) the particular field to check for. + :type name: string + :param name: The name of the object. + + :type properties: dict + :param properties: All the other data provided by Cloud Storage. + """ + self.name = name + self._properties = {} + if properties is not None: + self._properties.update(properties) - :rtype: bool - :returns: Whether metadata is available locally. + @property + def properties(self): + """Ensure properties are loaded, and return a copy. """ - if not self.metadata: - return False - elif field and field not in self.metadata: - return False - else: - return True + if not self._properties: + self._reload_properties() + return self._properties.copy() - def reload_metadata(self): - """Reload metadata from Cloud Storage. + metadata = properties # Backward-compatibility alias - :rtype: :class:`_MetadataMixin` + def _reload_properties(self): + """Reload properties from Cloud Storage. + + :rtype: :class:`_PropertyMixin` :returns: The object you just reloaded data for. """ # Pass only '?projection=noAcl' here because 'acl' and related # are handled via 'get_acl()' etc. query_params = {'projection': 'noAcl'} - self.metadata = self.connection.api_request( + self._properties = self.connection.api_request( method='GET', path=self.path, query_params=query_params) return self + reload_metadata = _reload_properties # backward-compat alias + + def _patch_properties(self, properties): + """Update particular fields of this object's properties. + + This method will only update the fields provided and will not + touch the other fields. + + It will also reload the properties locally based on the server's + response. - def get_metadata(self, field=None, default=None): - """Get all metadata or a specific field. + :type properties: dict + :param properties: The dictionary of values to update. + + :rtype: :class:`_PropertyMixin` + :returns: The current object.
+ """ + # Pass '?projection=full' here because 'PATCH' documented not + # to work properly w/ 'noAcl'. + self._properties = self.connection.api_request( + method='PATCH', path=self.path, data=properties, + query_params={'projection': 'full'}) + return self + patch_metadata = _patch_properties # backward-compat alias + + def _has_property(self, field=None): + """Check if property is available. + + :type field: string + :param field: (optional) the particular field to check for. + + :rtype: boolean + :returns: Whether property is available locally. If no ``field`` + passed, return whether *any* properties are available. + """ + if field and field not in self._properties: + return False + return len(self._properties) > 0 + has_metadata = _has_property # backward-compat alias + + def _get_property(self, field, default=None): + """Return the value of a field from the server-side representation. If you request a field that isn't available, and that field can be retrieved by refreshing data from Cloud Storage, this method - will reload the data using :func:`_MetadataMixin.reload_metadata`. + will reload the data using :func:`_PropertyMixin._reload_properties`. :type field: string - :param field: (optional) A particular field to retrieve from metadata. + :param field: A particular field to retrieve from properties. :type default: anything :param default: The value to return if the field provided wasn't found. - :rtype: dict or anything - :returns: All metadata or the value of the specific field. - - :raises: :class:`KeyError` if the field is in CUSTOM_METADATA_FIELDS. + :rtype: anything + :returns: value of the specific field, or the default if not found. """ - # We ignore 'acl' and related fields because they are meant to be - # handled via 'get_acl()' and related methods. - custom = self.CUSTOM_METADATA_FIELDS.get(field) + # Raise for fields which have custom accessors. + custom = self.CUSTOM_PROPERTY_ACCESSORS.get(field) if custom is not None: message = 'Use %s or related methods instead.' % custom raise KeyError((field, message)) - if not self.has_metadata(field=field): - self.reload_metadata() + if not self._properties or field not in self._properties: + self._reload_properties() - if field: - return self.metadata.get(field, default) - else: - return self.metadata - - def patch_metadata(self, metadata): - """Update particular fields of this object's metadata. - - This method will only update the fields provided and will not - touch the other fields. - - It will also reload the metadata locally based on the server's - response. - - :type metadata: dict - :param metadata: The dictionary of values to update. - - :rtype: :class:`_MetadataMixin` - :returns: The current object. - """ - self.metadata = self.connection.api_request( - method='PATCH', path=self.path, data=metadata, - query_params={'projection': 'full'}) - return self + return self._properties.get(field, default) + get_metadata = _get_property # Backward-compat alias def get_acl(self): - """Get ACL metadata as an object. + """Get ACL as an object. :returns: An ACL object for the current object. 
""" diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..4fb7d63bf000 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -2,7 +2,7 @@ import os -from gcloud.storage._helpers import _MetadataMixin +from gcloud.storage._helpers import _PropertyMixin from gcloud.storage import exceptions from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL @@ -11,7 +11,7 @@ from gcloud.storage.key import _KeyIterator -class Bucket(_MetadataMixin): +class Bucket(_PropertyMixin): """A class representing a Bucket on Cloud Storage. :type connection: :class:`gcloud.storage.connection.Connection` @@ -21,18 +21,18 @@ class Bucket(_MetadataMixin): :param name: The name of the bucket. """ - CUSTOM_METADATA_FIELDS = { + CUSTOM_PROPERTY_ACCESSORS = { 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', } - """Mapping of field name -> accessor for fields w/ custom accessors.""" + """Map field name -> accessor for fields w/ custom accessors.""" # ACL rules are lazily retrieved. _acl = _default_object_acl = None - def __init__(self, connection=None, name=None, metadata=None): - super(Bucket, self).__init__(name=name, metadata=metadata) + def __init__(self, connection=None, name=None, properties=None): + super(Bucket, self).__init__(name=name, properties=properties) self._connection = connection @property @@ -60,7 +60,7 @@ def from_dict(cls, bucket_dict, connection=None): :returns: A bucket constructed from the data provided. """ return cls(connection=connection, name=bucket_dict['name'], - metadata=bucket_dict) + properties=bucket_dict) def __repr__(self): return '' % self.name @@ -120,7 +120,7 @@ def get_all_keys(self): """List all the keys in this bucket. This will **not** retrieve all the data for all the keys, it - will only retrieve metadata about the keys. + will only retrieve the keys. This is equivalent to:: @@ -344,7 +344,7 @@ def upload_file_object(self, file_obj, key=None): return key.upload_from_file(file_obj) def configure_website(self, main_page_suffix=None, not_found_page=None): - """Configure website-related metadata. + """Configure website-related properties. .. note:: This (apparently) only works @@ -385,7 +385,7 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): 'notFoundPage': not_found_page, }, } - return self.patch_metadata(data) + return self._patch_properties(data) def disable_website(self): """Disable the website configuration for this bucket. @@ -395,21 +395,11 @@ def disable_website(self): """ return self.configure_website(None, None) - def get_acl(self): - """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. - - :rtype: :class:`gcloud.storage.acl.BucketACL` - :returns: An ACL object for the current bucket. - """ - if not self.acl.loaded: - self.acl.reload() - return self.acl - def get_default_object_acl(self): """Get the current Default Object ACL rules. - If the appropriate metadata isn't available locally, this method - will reload it from Cloud Storage. + If the acl isn't available locally, this method will reload it from + Cloud Storage. :rtype: :class:`gcloud.storage.acl.DefaultObjectACL` :returns: A DefaultObjectACL object for this bucket. @@ -451,10 +441,10 @@ def get_lifecycle(self): :rtype: list(dict) :returns: A sequence of mappings describing each CORS policy. 
""" - if not self.has_metadata('lifecycle'): - self.reload_metadata() + if not self._has_property('lifecycle'): + self._reload_properties() result = [] - info = self.metadata.get('lifecycle', {}) + info = self._properties.get('lifecycle', {}) for rule in info.get('rule', ()): rule = rule.copy() result.append(rule) @@ -469,7 +459,7 @@ def update_lifecycle(self, rules): :type rules: list(dict) :param rules: A sequence of mappings describing each lifecycle policy. """ - self.patch_metadata({'lifecycle': {'rule': rules}}) + self._patch_properties({'lifecycle': {'rule': rules}}) class BucketIterator(Iterator): diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index cac96b2471f9..b87a7f711d01 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -4,19 +4,19 @@ import os from StringIO import StringIO -from gcloud.storage._helpers import _MetadataMixin +from gcloud.storage._helpers import _PropertyMixin from gcloud.storage.acl import ObjectACL from gcloud.storage.exceptions import StorageError from gcloud.storage.iterator import Iterator -class Key(_MetadataMixin): +class Key(_PropertyMixin): """A wrapper around Cloud Storage's concept of an ``Object``.""" - CUSTOM_METADATA_FIELDS = { + CUSTOM_PROPERTY_ACCESSORS = { 'acl': 'get_acl', } - """Mapping of field name -> accessor for fields w/ custom accessors.""" + """Map field name -> accessor for fields w/ custom accessors.""" CHUNK_SIZE = 1024 * 1024 # 1 MB. """The size of a chunk of data whenever iterating (1 MB). @@ -26,7 +26,7 @@ class Key(_MetadataMixin): # ACL rules are lazily retrieved. _acl = None - def __init__(self, bucket=None, name=None, metadata=None): + def __init__(self, bucket=None, name=None, properties=None): """Key constructor. :type bucket: :class:`gcloud.storage.bucket.Bucket` @@ -36,10 +36,10 @@ def __init__(self, bucket=None, name=None, metadata=None): :param name: The name of the key. This corresponds to the unique path of the object in the bucket. - :type metadata: dict - :param metadata: All the other data provided by Cloud Storage. + :type properties: dict + :param properties: All the other data provided by Cloud Storage. """ - super(Key, self).__init__(name=name, metadata=metadata or {}) + super(Key, self).__init__(name=name, properties=properties) self.bucket = bucket @property @@ -65,7 +65,7 @@ def from_dict(cls, key_dict, bucket=None): :returns: A key based on the data provided. 
""" - return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict) + return cls(bucket=bucket, name=key_dict['name'], properties=key_dict) def __repr__(self): if self.bucket: diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index 313b6b452e9e..100d48e4ecd2 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -1,38 +1,151 @@ import unittest2 -class Test_MetadataMixin(unittest2.TestCase): +class Test_PropertyMixin(unittest2.TestCase): def _getTargetClass(self): - from gcloud.storage._helpers import _MetadataMixin - return _MetadataMixin + from gcloud.storage._helpers import _PropertyMixin + return _PropertyMixin def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_abstract_properties(self): - metadata_object = self._makeOne() - self.assertRaises(NotImplementedError, - lambda: metadata_object.connection) - self.assertRaises(NotImplementedError, - lambda: metadata_object.path) + def _derivedClass(self, connection=None, path=None, **custom_fields): - def test_get_metadata_w_custom_field(self): class Derived(self._getTargetClass()): - CUSTOM_METADATA_FIELDS = {'foo': 'get_foo'} + CUSTOM_PROPERTY_ACCESSORS = custom_fields @property - def connection(self): # pragma: NO COVER - return None + def connection(self): + return connection @property - def path(self): # pragma: NO COVER - return None + def path(self): + return path - derived = Derived() + return Derived + + def test_connetction_is_abstract(self): + mixin = self._makeOne() + self.assertRaises(NotImplementedError, lambda: mixin.connection) + + def test_path_is_abstract(self): + mixin = self._makeOne() + self.assertRaises(NotImplementedError, lambda: mixin.path) + + def test__reload_properties(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + derived._reload_properties() + self.assertEqual(derived._properties, {'foo': 'Foo'}) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test__has_property_not_loaded(self): + mixin = self._makeOne() + self.assertEqual(mixin._has_property('nonesuch'), False) + + def test__has_property_loaded_no_field(self): + mixin = self._makeOne(properties={'foo': 'Foo'}) + self.assertEqual(mixin._has_property(), True) + + def test__has_property_loaded_miss(self): + mixin = self._makeOne(properties={'foo': 'Foo'}) + self.assertEqual(mixin._has_property('nonesuch'), False) + + def test__has_property_loaded_hit(self): + mixin = self._makeOne(properties={'extant': False}) + self.assertEqual(mixin._has_property('extant'), True) + + def test__get_property_eager_hit(self): + derived = self._derivedClass()(properties={'foo': 'Foo'}) + self.assertEqual(derived._get_property('foo'), 'Foo') + + def test__get_property_eager_miss_w_default(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + default = object() + self.assertTrue(derived._get_property('nonesuch', default) is default) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test__get_property_lazy_hit(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + 
self.assertTrue(derived._get_property('nonesuch') is None) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test__get_property_w_custom_field(self): + derived = self._derivedClass(foo='get_foo')() try: - derived.get_metadata('foo') + derived._get_property('foo') except KeyError as e: self.assertTrue('get_foo' in str(e)) else: # pragma: NO COVER self.assert_('KeyError not raised') + + def test__patch_properties(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + self.assertTrue(derived._patch_properties({'foo': 'Foo'}) is derived) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['data'], {'foo': 'Foo'}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_properties_eager(self): + derived = self._derivedClass()(properties={'extant': False}) + self.assertEqual(derived.properties, {'extant': False}) + + def test_properties_lazy(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + self.assertEqual(derived.properties, {'foo': 'Foo'}) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_get_acl_not_yet_loaded(self): + class ACL(object): + loaded = False + + def reload(self): + self.loaded = True + + mixin = self._makeOne() + acl = mixin.acl = ACL() + self.assertTrue(mixin.get_acl() is acl) + self.assertTrue(acl.loaded) + + def test_get_acl_already_loaded(self): + class ACL(object): + loaded = True + mixin = self._makeOne() + acl = mixin.acl = ACL() + self.assertTrue(mixin.get_acl() is acl) # no 'reload' + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..5c172e600a98 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -16,41 +16,41 @@ def test_ctor_defaults(self): bucket = self._makeOne() self.assertEqual(bucket.connection, None) self.assertEqual(bucket.name, None) - self.assertEqual(bucket.metadata, None) + self.assertEqual(bucket._properties, {}) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_ctor_explicit(self): NAME = 'name' connection = _Connection() - metadata = {'key': 'value'} - bucket = self._makeOne(connection, NAME, metadata) + properties = {'key': 'value'} + bucket = self._makeOne(connection, NAME, properties) self.assertTrue(bucket.connection is connection) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket._properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_from_dict_defaults(self): NAME = 'name' - metadata = {'key': 'value', 'name': NAME} + properties = {'key': 'value', 'name': NAME} klass = self._getTargetClass() - bucket = klass.from_dict(metadata) + bucket = 
klass.from_dict(properties) self.assertEqual(bucket.connection, None) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket.properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_from_dict_explicit(self): NAME = 'name' connection = _Connection() - metadata = {'key': 'value', 'name': NAME} + properties = {'key': 'value', 'name': NAME} klass = self._getTargetClass() - bucket = klass.from_dict(metadata, connection) + bucket = klass.from_dict(properties, connection) self.assertTrue(bucket.connection is connection) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket.properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) @@ -406,146 +406,6 @@ def upload_from_file(self, fh): bucket.upload_file_object(FILEOBJECT, KEY) self.assertEqual(_uploaded, [(bucket, KEY, FILEOBJECT)]) - def test_has_metdata_none_set(self): - NONESUCH = 'nonesuch' - bucket = self._makeOne() - self.assertFalse(bucket.has_metadata(NONESUCH)) - - def test_has_metdata_miss(self): - NONESUCH = 'nonesuch' - metadata = {'key': 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertFalse(bucket.has_metadata(NONESUCH)) - - def test_has_metdata_none_passed(self): - KEY = 'key' - metadata = {KEY: 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertTrue(bucket.has_metadata()) - - def test_has_metdata_hit(self): - KEY = 'key' - metadata = {KEY: 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertTrue(bucket.has_metadata(KEY)) - - def test_reload_metadata(self): - NAME = 'name' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - found = bucket.reload_metadata() - self.assertTrue(found is bucket) - self.assertEqual(found.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_none_set_none_passed(self): - NAME = 'name' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME) - found = bucket.get_metadata() - self.assertEqual(found, after) - self.assertEqual(bucket.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_acl_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'acl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_acl_w_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'acl', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_defaultObjectAcl_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'defaultObjectAcl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def 
test_get_metadata_none_set_defaultObjectAcl_miss_clear_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'defaultObjectAcl', - default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_lifecycle_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'lifecycle') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_lifecycle_w_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'lifecycle', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_miss(self): - NAME = 'name' - before = {'bar': 'Bar'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_metadata('foo'), None) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_hit(self): - NAME = 'name' - before = {'bar': 'Bar'} - connection = _Connection() - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_metadata('bar'), 'Bar') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_patch_metadata(self): - NAME = 'name' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertTrue(bucket.patch_metadata(after) is bucket) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], after) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_configure_website_defaults(self): NAME = 'name' patched = {'website': {'mainPageSuffix': None, @@ -553,7 +413,7 @@ def test_configure_website_defaults(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.configure_website() is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -568,7 +428,7 @@ def test_configure_website_explicit(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.configure_website('html', '404.html') is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -583,7 +443,7 @@ def test_disable_website(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.disable_website() is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -591,30 +451,6 @@ def test_disable_website(self): self.assertEqual(kw[0]['data'], patched) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def 
test_get_acl_lazy(self): - from gcloud.storage.acl import BucketACL - NAME = 'name' - connection = _Connection({'items': []}) - bucket = self._makeOne(connection, NAME) - acl = bucket.get_acl() - self.assertTrue(acl is bucket.acl) - self.assertTrue(isinstance(acl, BucketACL)) - self.assertEqual(list(bucket.acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s/acl' % NAME) - - def test_get_acl_eager(self): - connection = _Connection() - bucket = self._makeOne() - preset = bucket.acl # Ensure it is assigned - preset.loaded = True - acl = bucket.get_acl() - self.assertTrue(acl is preset) - kw = connection._requested - self.assertEqual(len(kw), 0) - def test_get_default_object_acl_lazy(self): from gcloud.storage.acl import BucketACL NAME = 'name' diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py index 7ddd77f49cbc..7fdeb2733efe 100644 --- a/gcloud/storage/test_key.py +++ b/gcloud/storage/test_key.py @@ -15,43 +15,43 @@ def test_ctor_defaults(self): self.assertEqual(key.bucket, None) self.assertEqual(key.connection, None) self.assertEqual(key.name, None) - self.assertEqual(key.metadata, {}) + self.assertEqual(key._properties, {}) self.assertTrue(key._acl is None) def test_ctor_explicit(self): KEY = 'key' connection = _Connection() bucket = _Bucket(connection) - metadata = {'key': 'value'} - key = self._makeOne(bucket, KEY, metadata) + properties = {'key': 'value'} + key = self._makeOne(bucket, KEY, properties) self.assertTrue(key.bucket is bucket) self.assertTrue(key.connection is connection) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_from_dict_defaults(self): KEY = 'key' - metadata = {'key': 'value', 'name': KEY} + properties = {'key': 'value', 'name': KEY} klass = self._getTargetClass() - key = klass.from_dict(metadata) + key = klass.from_dict(properties) self.assertEqual(key.bucket, None) self.assertEqual(key.connection, None) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_from_dict_explicit(self): KEY = 'key' connection = _Connection() bucket = _Bucket(connection) - metadata = {'key': 'value', 'name': KEY} + properties = {'key': 'value', 'name': KEY} klass = self._getTargetClass() - key = klass.from_dict(metadata, bucket) + key = klass.from_dict(properties, bucket) self.assertTrue(key.bucket is bucket) self.assertTrue(key.connection is connection) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_acl_property(self): @@ -326,137 +326,6 @@ def test_upload_from_string(self): self.assertEqual(rq[2]['data'], DATA[5:]) self.assertEqual(rq[2]['headers'], {'Content-Range': 'bytes 5-5/6'}) - def test_has_metdata_none_set(self): - NONESUCH = 'nonesuch' - key = self._makeOne() - self.assertFalse(key.has_metadata(NONESUCH)) - - def test_has_metdata_miss(self): - NONESUCH = 'nonesuch' - metadata = {'key': 'value'} - key = self._makeOne(metadata=metadata) - self.assertFalse(key.has_metadata(NONESUCH)) - - def test_has_metdata_none_passed(self): - KEY = 'key' - metadata = {KEY: 'value'} - key = self._makeOne(metadata=metadata) - self.assertTrue(key.has_metadata()) - - def test_has_metdata_hit(self): - KEY = 'key' - metadata = {KEY: 'value'} 
- key = self._makeOne(metadata=metadata) - self.assertTrue(key.has_metadata(KEY)) - - def test_reload_metadata(self): - KEY = 'key' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - found = key.reload_metadata() - self.assertTrue(found is key) - self.assertEqual(found.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_none_set_none_passed(self): - KEY = 'key' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - found = key.get_metadata() - self.assertEqual(found, after) - self.assertEqual(key.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_acl_no_default(self): - KEY = 'key' - connection = _Connection() - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - self.assertRaises(KeyError, key.get_metadata, 'acl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_acl_w_default(self): - KEY = 'key' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - default = object() - self.assertRaises(KeyError, key.get_metadata, 'acl', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_miss(self): - KEY = 'key' - before = {'bar': 'Bar'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertEqual(key.get_metadata('foo'), None) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_hit(self): - KEY = 'key' - before = {'bar': 'Bar'} - connection = _Connection() - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertEqual(key.get_metadata('bar'), 'Bar') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_patch_metadata(self): - KEY = 'key' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertTrue(key.patch_metadata(after) is key) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['data'], after) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - - def test_get_acl_lazy(self): - from gcloud.storage.acl import ObjectACL - KEY = 'key' - connection = _Connection({'items': []}) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - acl = key.get_acl() - self.assertTrue(acl is key.acl) - self.assertTrue(isinstance(acl, ObjectACL)) - self.assertEqual(list(key.acl), []) - - def test_get_acl_eager(self): - key = self._makeOne() - preset = key.acl - preset.loaded = True - acl = key.get_acl() - self.assertTrue(acl is preset) - def 
test_make_public(self): from gcloud.storage.acl import _ACLEntity KEY = 'key'
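Reviewer note: a minimal usage sketch of the renamed ``_PropertyMixin`` surface, patterned after the ``_Connection`` test double in test__helpers.py above. The ``ExampleResource`` class, the ``_FakeConnection`` helper, and the sample payload are illustrative assumptions only, not part of this change::

    # Sketch of the new _PropertyMixin behavior; names here are
    # hypothetical and exist only for illustration.
    from gcloud.storage._helpers import _PropertyMixin


    class _FakeConnection(object):
        """Stand-in connection that records requests, like the
        _Connection test double in test__helpers.py."""

        def __init__(self, *responses):
            self._responses = list(responses)
            self._requested = []

        def api_request(self, **kw):
            self._requested.append(kw)
            return self._responses.pop(0)


    class ExampleResource(_PropertyMixin):
        """Hypothetical concrete subclass supplying the abstract hooks."""

        CUSTOM_PROPERTY_ACCESSORS = {'acl': 'get_acl'}

        def __init__(self, connection, path, name=None, properties=None):
            super(ExampleResource, self).__init__(name=name,
                                                  properties=properties)
            self._connection = connection
            self._path = path

        @property
        def connection(self):
            return self._connection

        @property
        def path(self):
            return self._path


    connection = _FakeConnection({'storageClass': 'STANDARD'},
                                 {'storageClass': 'NEARLINE'})
    resource = ExampleResource(connection, '/b/example')

    # First access lazily issues a GET with '?projection=noAcl' and caches.
    print(resource.properties)            # {'storageClass': 'STANDARD'}

    # Fields with custom accessors refuse generic access.
    try:
        resource._get_property('acl')
    except KeyError as err:
        print(err)                        # message points at 'get_acl'

    # PATCH with '?projection=full'; the server response replaces the cache.
    resource._patch_properties({'storageClass': 'NEARLINE'})

    # Old spellings still work through the backward-compat aliases.
    print(resource.get_metadata('storageClass'))   # 'NEARLINE'

The underscore-prefixed methods signal that subclasses, rather than callers, drive property access, while the backward-compat aliases (``get_metadata``, ``patch_metadata``, ``has_metadata``, ``reload_metadata``, ``metadata``) keep existing callers working until they migrate.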