From d52d81f071d5338224da9c9181f4281e46ea5295 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Nov 2014 14:23:13 -0500 Subject: [PATCH 01/20] Add support for the Bucket's 'versioning' field. See: https://cloud.google.com/storage/docs/object-versioning Addresses part of #314. --- gcloud/storage/bucket.py | 31 ++++++++++++++++++++ gcloud/storage/test_bucket.py | 54 +++++++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..96f8cde1abd9 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -25,6 +25,7 @@ class Bucket(_MetadataMixin): 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', + 'versioning': 'get_versioning', } """Mapping of field name -> accessor for fields w/ custom accessors.""" @@ -471,6 +472,36 @@ def update_lifecycle(self, rules): """ self.patch_metadata({'lifecycle': {'rule': rules}}) + def get_versioning(self): + """Is versioning enabled for this bucket? + + See: https://cloud.google.com/storage/docs/object-versioning for + details. + + :rtype: boolean + :returns: True if enabled, else False. + """ + if not self.has_metadata(field='versioning'): + self.reload_metadata() + versioning = self.metadata.get('versioning', {}) + return versioning.get('enabled', False) + + def enable_versioning(self): + """Enable versioning for this bucket. + + See: https://cloud.google.com/storage/docs/object-versioning for + details. + """ + self.patch_metadata({'versioning': {'enabled': True}}) + + def disable_versioning(self): + """Disable versioning for this bucket. + + See: https://cloud.google.com/storage/docs/object-versioning for + details. + """ + self.patch_metadata({'versioning': {'enabled': False}}) + class BucketIterator(Iterator): """An iterator listing all buckets. 
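A quick usage sketch of the three accessors added above. The connection setup is assumed from gcloud's getting-started helpers of this period and is not part of the patch:

    from gcloud import storage

    connection = storage.get_connection(PROJECT_ID, CLIENT_EMAIL, KEY_PATH)  # assumed helper
    bucket = connection.get_bucket('my-bucket')

    if not bucket.get_versioning():    # reloads bucket metadata lazily if needed
        bucket.enable_versioning()     # PATCH {'versioning': {'enabled': True}}
    # ... later, to turn it back off:
    bucket.disable_versioning()        # PATCH {'versioning': {'enabled': False}}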
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..1cfc191c8f21 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -781,6 +781,60 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) + def test_get_versioning_eager(self): + NAME = 'name' + before = {'bar': 'Bar', 'versioning': {'enabled': True}} + connection = _Connection() + bucket = self._makeOne(connection, NAME, before) + self.assertEqual(bucket.get_versioning(), True) + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_versioning_lazy(self): + NAME = 'name' + before = {'bar': 'Bar'} + after = {'bar': 'Bar', 'versioning': {'enabled': True}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertEqual(bucket.get_versioning(), True) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_enable_versioning(self): + NAME = 'name' + before = {'versioning': {'enabled': False}} + after = {'versioning': {'enabled': True}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertFalse(bucket.get_versioning()) + bucket.enable_versioning() + self.assertTrue(bucket.get_versioning()) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'versioning': {'enabled': True}}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_disable_versioning(self): + NAME = 'name' + before = {'versioning': {'enabled': True}} + after = {'versioning': {'enabled': False}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertTrue(bucket.get_versioning()) + bucket.disable_versioning() + self.assertFalse(bucket.get_versioning()) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'versioning': {'enabled': False}}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + class TestBucketIterator(unittest2.TestCase): From 8477e85c90a02eeab027061a84edc6550e2fa90b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Nov 2014 14:28:19 -0500 Subject: [PATCH 02/20] Add tests asserting that 'get_metadata' raises w/ 'versioning'.
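The tests below pin down the guard added in PATCH 01: because 'versioning' now has a custom accessor, `get_metadata('versioning')` must raise rather than return the raw field. From the caller's side (a sketch; bucket setup as in the note under PATCH 01):

    try:
        bucket.get_metadata('versioning')
    except KeyError as e:
        # e.args -> ('versioning', 'Use get_versioning or related methods instead.')
        pass
    enabled = bucket.get_versioning()  # the supported accessor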
--- gcloud/storage/test_bucket.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1cfc191c8f21..b330f5b3d886 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -510,6 +510,23 @@ def test_get_metadata_lifecycle_w_default(self): kw = connection._requested self.assertEqual(len(kw), 0) + def test_get_metadata_versioning_no_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + self.assertRaises(KeyError, bucket.get_metadata, 'versioning') + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_metadata_versioning_w_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + default = object() + self.assertRaises(KeyError, bucket.get_metadata, 'versioning', default) + kw = connection._requested + self.assertEqual(len(kw), 0) + def test_get_metadata_miss(self): NAME = 'name' before = {'bar': 'Bar'} From 037a732299063f175df15060d02cd3dad9bdbd93 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Nov 2014 15:01:03 -0500 Subject: [PATCH 03/20] Add support for the Bucket's 'logging' field. See: https://cloud.google.com/storage/docs/accesslogs Addresses 'logging' part of #314. --- gcloud/storage/bucket.py | 40 +++++++++++++ gcloud/storage/test_bucket.py | 102 ++++++++++++++++++++++++++++++++++ 2 files changed, 142 insertions(+) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..60ad196e0347 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -25,6 +25,7 @@ class Bucket(_MetadataMixin): 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', + 'logging': 'get_logging', } """Mapping of field name -> accessor for fields w/ custom accessors.""" @@ -471,6 +472,45 @@ def update_lifecycle(self, rules): """ self.patch_metadata({'lifecycle': {'rule': rules}}) + def get_logging(self): + """Return info about access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#status + + :rtype: dict or None + :returns: a dict w/ keys ``bucket_name`` and ``object_prefix`` + (if logging is enabled), or None (if not). + """ + if not self.has_metadata('logging'): + self.reload_metadata() + info = self.metadata.get('logging') + if info is not None: + info = info.copy() + info['bucket_name'] = info.pop('logBucket') + info['object_prefix'] = info.pop('logObjectPrefix', '') + return info + + def enable_logging(self, bucket_name, object_prefix=''): + """Enable access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#delivery + + :type bucket_name: string + :param bucket_name: name of bucket in which to store access logs + + :type object_prefix: string + :param object_prefix: prefix for access log filenames + """ + info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix} + self.patch_metadata({'logging': info}) + + def disable_logging(self): + """Disable access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#disabling + """ + self.patch_metadata({'logging': None}) + class BucketIterator(Iterator): """An iterator listing all buckets.
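A usage sketch for the logging accessors above (bucket setup assumed as earlier; the log-bucket name and prefix are illustrative):

    info = bucket.get_logging()        # None when access logging is not configured
    if info is None:
        bucket.enable_logging('my-log-bucket', object_prefix='pfx')
    info = bucket.get_logging()
    # The JSON API's 'logBucket' / 'logObjectPrefix' keys come back re-keyed
    # as 'bucket_name' / 'object_prefix' in the returned copy.
    bucket.disable_logging()           # PATCHes {'logging': None}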
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..db12b25147f9 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -510,6 +510,23 @@ def test_get_metadata_lifecycle_w_default(self): kw = connection._requested self.assertEqual(len(kw), 0) + def test_get_metadata_logging_no_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + self.assertRaises(KeyError, bucket.get_metadata, 'logging') + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_metadata_logging_w_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + default = object() + self.assertRaises(KeyError, bucket.get_metadata, 'logging', default) + kw = connection._requested + self.assertEqual(len(kw), 0) + def test_get_metadata_miss(self): NAME = 'name' before = {'bar': 'Bar'} @@ -781,6 +798,91 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) + def test_get_logging_eager_w_prefix(self): + NAME = 'name' + LOG_BUCKET = 'logs' + LOG_PREFIX = 'pfx' + before = { + 'logging': {'logBucket': LOG_BUCKET, + 'logObjectPrefix': LOG_PREFIX}} + connection = _Connection() + bucket = self._makeOne(connection, NAME, before) + info = bucket.get_logging() + self.assertEqual(info['bucket_name'], LOG_BUCKET) + self.assertEqual(info['object_prefix'], LOG_PREFIX) + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_logging_lazy_wo_prefix(self): + NAME = 'name' + LOG_BUCKET = 'logs' + after = {'logging': {'logBucket': LOG_BUCKET}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME) + info = bucket.get_logging() + self.assertEqual(info['bucket_name'], LOG_BUCKET) + self.assertEqual(info['object_prefix'], '') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_enable_logging_defaults(self): + NAME = 'name' + LOG_BUCKET = 'logs' + before = {'logging': None} + after = {'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': ''}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertTrue(bucket.get_logging() is None) + bucket.enable_logging(LOG_BUCKET) + info = bucket.get_logging() + self.assertEqual(info['bucket_name'], LOG_BUCKET) + self.assertEqual(info['object_prefix'], '') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], after) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_enable_logging_explicit(self): + NAME = 'name' + LOG_BUCKET = 'logs' + LOG_PFX = 'pfx' + before = {'logging': None} + after = { + 'logging': {'logBucket': LOG_BUCKET, 'logObjectPrefix': LOG_PFX}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertTrue(bucket.get_logging() is None) + bucket.enable_logging(LOG_BUCKET, LOG_PFX) + info = bucket.get_logging() + self.assertEqual(info['bucket_name'], LOG_BUCKET) + self.assertEqual(info['object_prefix'], LOG_PFX) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], after) + 
self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_disable_logging(self): + NAME = 'name' + before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}} + after = {'logging': None} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertTrue(bucket.get_logging() is not None) + bucket.disable_logging() + self.assertTrue(bucket.get_logging() is None) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'logging': None}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + class TestBucketIterator(unittest2.TestCase): From c586a7c0fa634ce383efeec78ea3d965b079e1b4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Nov 2014 15:33:15 -0500 Subject: [PATCH 04/20] Add CORS support to buckets. See: http://www.w3.org/TR/cors/ and https://cloud.google.com/storage/docs/json_api/v1/buckets Addresses 'cors' part of #314. --- gcloud/storage/bucket.py | 59 +++++++++++++++++++++-- gcloud/storage/test_bucket.py | 88 +++++++++++++++++++++++++++++++++++ 2 files changed, 144 insertions(+), 3 deletions(-) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..26b9bf480a6f 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -25,6 +25,7 @@ class Bucket(_MetadataMixin): 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', + 'cors': 'get_cors', } """Mapping of field name -> accessor for fields w/ custom accessors.""" @@ -443,13 +444,13 @@ def make_public(self, recursive=False, future=False): key.save_acl() def get_lifecycle(self): - """Retrieve CORS policies configured for this bucket. + """Retrieve lifecycle rules configured for this bucket. See: https://cloud.google.com/storage/docs/lifecycle and https://cloud.google.com/storage/docs/json_api/v1/buckets :rtype: list(dict) - :returns: A sequence of mappings describing each CORS policy. + :returns: A sequence of mappings describing each lifecycle rule. """ if not self.has_metadata('lifecycle'): self.reload_metadata() @@ -467,10 +468,62 @@ def update_lifecycle(self, rules): https://cloud.google.com/storage/docs/json_api/v1/buckets :type rules: list(dict) - :param rules: A sequence of mappings describing each lifecycle policy. + :param rules: A sequence of mappings describing each lifecycle rule. """ self.patch_metadata({'lifecycle': {'rule': rules}}) + def get_cors(self): + """Retrieve CORS policies configured for this bucket. + + See: http://www.w3.org/TR/cors/ and + https://cloud.google.com/storage/docs/json_api/v1/buckets + + :rtype: list(dict) + :returns: A sequence of mappings describing each CORS policy. + Keys include 'max_age', 'methods', 'origins', and + 'headers'. + """ + if not self.has_metadata('cors'): + self.reload_metadata() + result = [] + for entry in self.metadata.get('cors', ()): + entry = entry.copy() + result.append(entry) + if 'maxAgeSeconds' in entry: + entry['max_age'] = entry.pop('maxAgeSeconds') + if 'method' in entry: + entry['methods'] = entry.pop('method') + if 'origin' in entry: + entry['origins'] = entry.pop('origin') + if 'responseHeader' in entry: + entry['headers'] = entry.pop('responseHeader') + return result + + def update_cors(self, entries): + """Update CORS policies configured for this bucket.
+ + See: http://www.w3.org/TR/cors/ and + https://cloud.google.com/storage/docs/json_api/v1/buckets + + :type entries: list(dict) + :param entries: A sequence of mappings describing each CORS policy. + Keys include 'max_age', 'methods', 'origins', and + 'headers'. + """ + to_patch = [] + for entry in entries: + entry = entry.copy() + to_patch.append(entry) + if 'max_age' in entry: + entry['maxAgeSeconds'] = entry.pop('max_age') + if 'methods' in entry: + entry['method'] = entry.pop('methods') + if 'origins' in entry: + entry['origin'] = entry.pop('origins') + if 'headers' in entry: + entry['responseHeader'] = entry.pop('headers') + self.patch_metadata({'cors': to_patch}) + class BucketIterator(Iterator): """An iterator listing all buckets. diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..7758de2adf85 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -510,6 +510,23 @@ def test_get_metadata_lifecycle_w_default(self): kw = connection._requested self.assertEqual(len(kw), 0) + def test_get_metadata_cors_no_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + self.assertRaises(KeyError, bucket.get_metadata, 'cors') + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_metadata_none_set_cors_w_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + default = object() + self.assertRaises(KeyError, bucket.get_metadata, 'cors', default) + kw = connection._requested + self.assertEqual(len(kw), 0) + def test_get_metadata_miss(self): NAME = 'name' before = {'bar': 'Bar'} @@ -781,6 +798,77 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) + def test_get_cors_eager(self): + NAME = 'name' + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': ['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + before = {'cors': [CORS_ENTRY, {}]} + connection = _Connection() + bucket = self._makeOne(connection, NAME, before) + entries = bucket.get_cors() + self.assertEqual(len(entries), 2) + self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) + self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) + self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + self.assertEqual(entries[1], {}) + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_cors_lazy(self): + NAME = 'name' + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': ['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + after = {'cors': [CORS_ENTRY]} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME) + entries = bucket.get_cors() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) + self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) + self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_update_cors(self): + NAME = 'name' + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': 
['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + MAPPED = { + 'max_age': 1234, + 'methods': ['OPTIONS', 'GET'], + 'origins': ['127.0.0.1'], + 'headers': ['Content-Type'], + } + after = {'cors': [CORS_ENTRY, {}]} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME) + bucket.update_cors([MAPPED, {}]) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], after) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + entries = bucket.get_cors() + self.assertEqual(entries, [MAPPED, {}]) + class TestBucketIterator(unittest2.TestCase): From b721f4226417e8f3e404b29b1c5477fe84bcde1f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 4 Nov 2014 16:30:37 -0500 Subject: [PATCH 05/20] Add location support to buckets. See: https://cloud.google.com/storage/docs/json_api/v1/buckets and https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations Addresses 'location' part of #314. --- gcloud/storage/bucket.py | 25 ++++++++++++++++++ gcloud/storage/test_bucket.py | 49 +++++++++++++++++++++++++++++++++++ 2 files changed, 74 insertions(+) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..0a53a648df90 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -25,6 +25,7 @@ class Bucket(_MetadataMixin): 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', + 'location': 'get_location', } """Mapping of field name -> accessor for fields w/ custom accessors.""" @@ -471,6 +472,30 @@ def update_lifecycle(self, rules): """ self.patch_metadata({'lifecycle': {'rule': rules}}) + def get_location(self): + """Retrieve location configured for this bucket. + + See: https://cloud.google.com/storage/docs/json_api/v1/buckets and + https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations + + :rtype: string + :returns: The configured location. + """ + if not self.has_metadata('location'): + self.reload_metadata() + return self.metadata.get('location') + + def set_location(self, location): + """Update location configured for this bucket. + + See: https://cloud.google.com/storage/docs/json_api/v1/buckets and + https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations + + :type location: string + :param location: The new configured location. + """ + self.patch_metadata({'location': location}) + class BucketIterator(Iterator): """An iterator listing all buckets.
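The CORS helpers in PATCH 04 translate between the JSON API's camelCase keys and the snake_case keys exposed to callers, and the location accessors above are a thin get/patch pair. A sketch (bucket setup assumed as earlier):

    entry = {
        'max_age': 1234,                  # sent as 'maxAgeSeconds'
        'methods': ['OPTIONS', 'GET'],    # sent as 'method'
        'origins': ['127.0.0.1'],         # sent as 'origin'
        'headers': ['Content-Type'],      # sent as 'responseHeader'
    }
    bucket.update_cors([entry])
    assert bucket.get_cors() == [entry]   # keys are mapped back on read

    bucket.set_location('EU')             # PATCH {'location': 'EU'}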
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..70d05a48ccb0 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -510,6 +510,23 @@ def test_get_metadata_lifecycle_w_default(self): kw = connection._requested self.assertEqual(len(kw), 0) + def test_get_metadata_location_no_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + self.assertRaises(KeyError, bucket.get_metadata, 'location') + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_metadata_location_w_default(self): + NAME = 'name' + connection = _Connection() + bucket = self._makeOne(connection, NAME) + default = object() + self.assertRaises(KeyError, bucket.get_metadata, 'location', default) + kw = connection._requested + self.assertEqual(len(kw), 0) + def test_get_metadata_miss(self): NAME = 'name' before = {'bar': 'Bar'} @@ -781,6 +798,38 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) + def test_get_location_eager(self): + NAME = 'name' + connection = _Connection() + before = {'location': 'AS'} + bucket = self._makeOne(connection, NAME, before) + self.assertEqual(bucket.get_location(), 'AS') + kw = connection._requested + self.assertEqual(len(kw), 0) + + def test_get_location_lazy(self): + NAME = 'name' + connection = _Connection({'location': 'AS'}) + bucket = self._makeOne(connection, NAME) + self.assertEqual(bucket.get_location(), 'AS') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + + def test_update_location(self): + NAME = 'name' + connection = _Connection({'location': 'AS'}) + bucket = self._makeOne(connection, NAME) + bucket.set_location('AS') + self.assertEqual(bucket.get_location(), 'AS') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'location': 'AS'}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + class TestBucketIterator(unittest2.TestCase): From c9fa1f3fd52f14c08b97b192ad8ae66132c13eb6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 10:39:16 -0500 Subject: [PATCH 06/20] Expanded / explicit coverage and assertions for '_helpers._MetadataMixin'. 
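The expanded tests below build throwaway subclasses so a stub connection and path can be injected. The lazy-load behavior they assert, roughly (inside a test method, using the `_Connection` fake defined at the bottom of the module):

    connection = _Connection({'foo': 'Foo'})             # canned response; records calls
    derived = self._derivedClass(connection, '/path')()
    derived.get_metadata('foo')   # not yet loaded: GET /path?projection=noAcl, -> 'Foo'
    derived.get_metadata('foo')   # now cached locally: no second request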
--- gcloud/storage/test__helpers.py | 118 ++++++++++++++++++++++++++++---- 1 file changed, 105 insertions(+), 13 deletions(-) diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index 313b6b452e9e..7a2c7755eefd 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -10,29 +10,121 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_abstract_properties(self): - metadata_object = self._makeOne() - self.assertRaises(NotImplementedError, - lambda: metadata_object.connection) - self.assertRaises(NotImplementedError, - lambda: metadata_object.path) + def _derivedClass(self, connection=None, path=None, **custom_fields): - def test_get_metadata_w_custom_field(self): class Derived(self._getTargetClass()): - CUSTOM_METADATA_FIELDS = {'foo': 'get_foo'} + CUSTOM_METADATA_FIELDS = custom_fields @property - def connection(self): # pragma: NO COVER - return None + def connection(self): + return connection @property - def path(self): # pragma: NO COVER - return None + def path(self): + return path + + return Derived + + def test_connection_is_abstract(self): + mixin = self._makeOne() + self.assertRaises(NotImplementedError, lambda: mixin.connection) + + def test_path_is_abstract(self): + mixin = self._makeOne() + self.assertRaises(NotImplementedError, lambda: mixin.path) + + def test_has_metadata_not_loaded(self): + mixin = self._makeOne() + self.assertEqual(mixin.has_metadata('nonesuch'), False) + + def test_has_metadata_loaded_no_field(self): + mixin = self._makeOne(metadata={'foo': 'Foo'}) + self.assertEqual(mixin.has_metadata(), True) + + def test_has_metadata_loaded_miss(self): + mixin = self._makeOne(metadata={'foo': 'Foo'}) + self.assertEqual(mixin.has_metadata('nonesuch'), False) + + def test_has_metadata_loaded_hit(self): + mixin = self._makeOne(metadata={'extant': False}) + self.assertEqual(mixin.has_metadata('extant'), True) + + def test_reload_metadata(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + derived.reload_metadata() + self.assertEqual(derived.metadata, {'foo': 'Foo'}) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_get_metadata_eager_no_field(self): + derived = self._derivedClass()(metadata={'extant': False}) + self.assertEqual(derived.get_metadata(), {'extant': False}) + + def test_get_metadata_eager_hit(self): + derived = self._derivedClass()(metadata={'foo': 'Foo'}) + self.assertEqual(derived.get_metadata('foo'), 'Foo') - derived = Derived() + def test_get_metadata_lazy_hit(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + self.assertEqual(derived.get_metadata('foo'), 'Foo') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test_get_metadata_w_custom_field(self): + derived = self._derivedClass(foo='get_foo')() try: derived.get_metadata('foo') except KeyError as e: self.assertTrue('get_foo' in str(e)) else: # pragma: NO COVER self.assert_('KeyError not raised') + + def test_patch_metadata(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() +
self.assertTrue(derived.patch_metadata({'foo': 'Foo'}) is derived) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['data'], {'foo': 'Foo'}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_get_acl_not_yet_loaded(self): + class ACL(object): + loaded = False + + def reload(self): + self.loaded = True + + mixin = self._makeOne() + acl = mixin.acl = ACL() + self.assertTrue(mixin.get_acl() is acl) + self.assertTrue(acl.loaded) + + def test_get_acl_already_loaded(self): + class ACL(object): + loaded = True + mixin = self._makeOne() + acl = mixin.acl = ACL() + self.assertTrue(mixin.get_acl() is acl) # no 'reload' + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response From 2c3725dba1ea90af20047e7bea9adb7593dfb85c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 10:57:53 -0500 Subject: [PATCH 07/20] Remove redundant coverage for mixin methods. --- gcloud/storage/test_bucket.py | 140 ---------------------------------- gcloud/storage/test_key.py | 131 ------------------------------- 2 files changed, 271 deletions(-) diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 1466ecdde320..297dbc6082a3 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -406,146 +406,6 @@ def upload_from_file(self, fh): bucket.upload_file_object(FILEOBJECT, KEY) self.assertEqual(_uploaded, [(bucket, KEY, FILEOBJECT)]) - def test_has_metdata_none_set(self): - NONESUCH = 'nonesuch' - bucket = self._makeOne() - self.assertFalse(bucket.has_metadata(NONESUCH)) - - def test_has_metdata_miss(self): - NONESUCH = 'nonesuch' - metadata = {'key': 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertFalse(bucket.has_metadata(NONESUCH)) - - def test_has_metdata_none_passed(self): - KEY = 'key' - metadata = {KEY: 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertTrue(bucket.has_metadata()) - - def test_has_metdata_hit(self): - KEY = 'key' - metadata = {KEY: 'value'} - bucket = self._makeOne(metadata=metadata) - self.assertTrue(bucket.has_metadata(KEY)) - - def test_reload_metadata(self): - NAME = 'name' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - found = bucket.reload_metadata() - self.assertTrue(found is bucket) - self.assertEqual(found.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_none_set_none_passed(self): - NAME = 'name' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME) - found = bucket.get_metadata() - self.assertEqual(found, after) - self.assertEqual(bucket.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_acl_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = 
self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'acl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_acl_w_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'acl', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_defaultObjectAcl_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'defaultObjectAcl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_none_set_defaultObjectAcl_miss_clear_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'defaultObjectAcl', - default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_lifecycle_no_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - self.assertRaises(KeyError, bucket.get_metadata, 'lifecycle') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_lifecycle_w_default(self): - NAME = 'name' - connection = _Connection() - bucket = self._makeOne(connection, NAME) - default = object() - self.assertRaises(KeyError, bucket.get_metadata, 'lifecycle', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_miss(self): - NAME = 'name' - before = {'bar': 'Bar'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_metadata('foo'), None) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_hit(self): - NAME = 'name' - before = {'bar': 'Bar'} - connection = _Connection() - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_metadata('bar'), 'Bar') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_patch_metadata(self): - NAME = 'name' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertTrue(bucket.patch_metadata(after) is bucket) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], after) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_configure_website_defaults(self): NAME = 'name' patched = {'website': {'mainPageSuffix': None, diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py index 7ddd77f49cbc..481f7ce0d3a8 100644 --- a/gcloud/storage/test_key.py +++ b/gcloud/storage/test_key.py @@ -326,137 +326,6 @@ def test_upload_from_string(self): self.assertEqual(rq[2]['data'], DATA[5:]) self.assertEqual(rq[2]['headers'], {'Content-Range': 'bytes 5-5/6'}) - def test_has_metdata_none_set(self): - NONESUCH = 'nonesuch' - key = self._makeOne() - self.assertFalse(key.has_metadata(NONESUCH)) - - def test_has_metdata_miss(self): - NONESUCH = 'nonesuch' - metadata = {'key': 'value'} - key = 
self._makeOne(metadata=metadata) - self.assertFalse(key.has_metadata(NONESUCH)) - - def test_has_metdata_none_passed(self): - KEY = 'key' - metadata = {KEY: 'value'} - key = self._makeOne(metadata=metadata) - self.assertTrue(key.has_metadata()) - - def test_has_metdata_hit(self): - KEY = 'key' - metadata = {KEY: 'value'} - key = self._makeOne(metadata=metadata) - self.assertTrue(key.has_metadata(KEY)) - - def test_reload_metadata(self): - KEY = 'key' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - found = key.reload_metadata() - self.assertTrue(found is key) - self.assertEqual(found.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_none_set_none_passed(self): - KEY = 'key' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - found = key.get_metadata() - self.assertEqual(found, after) - self.assertEqual(key.metadata, after) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_acl_no_default(self): - KEY = 'key' - connection = _Connection() - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - self.assertRaises(KeyError, key.get_metadata, 'acl') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_acl_w_default(self): - KEY = 'key' - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY) - default = object() - self.assertRaises(KeyError, key.get_metadata, 'acl', default) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_metadata_miss(self): - KEY = 'key' - before = {'bar': 'Bar'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertEqual(key.get_metadata('foo'), None) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_get_metadata_hit(self): - KEY = 'key' - before = {'bar': 'Bar'} - connection = _Connection() - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertEqual(key.get_metadata('bar'), 'Bar') - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_patch_metadata(self): - KEY = 'key' - before = {'foo': 'Foo'} - after = {'bar': 'Bar'} - connection = _Connection(after) - bucket = _Bucket(connection) - key = self._makeOne(bucket, KEY, before) - self.assertTrue(key.patch_metadata(after) is key) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/name/o/%s' % KEY) - self.assertEqual(kw[0]['data'], after) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - - def test_get_acl_lazy(self): - from gcloud.storage.acl import ObjectACL - KEY = 'key' - connection = _Connection({'items': []}) - bucket = _Bucket(connection) - key = 
self._makeOne(bucket, KEY) - acl = key.get_acl() - self.assertTrue(acl is key.acl) - self.assertTrue(isinstance(acl, ObjectACL)) - self.assertEqual(list(key.acl), []) - - def test_get_acl_eager(self): - key = self._makeOne() - preset = key.acl - preset.loaded = True - acl = key.get_acl() - self.assertTrue(acl is preset) - def test_make_public(self): from gcloud.storage.acl import _ACLEntity KEY = 'key' From 36966e864b48294f6aa986f0d4b6e460c163bdb9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 11:35:57 -0500 Subject: [PATCH 08/20] Stop using 'metadata' to refer to all properties in bucket/key representation. Make helper methods which get / set those properties private. Split some dual-mode methods. --- gcloud/storage/_helpers.py | 154 +++++++++++++++++--------------- gcloud/storage/bucket.py | 42 ++++----- gcloud/storage/key.py | 18 ++-- gcloud/storage/test__helpers.py | 85 +++++++++++------- gcloud/storage/test_bucket.py | 50 +++-------- gcloud/storage/test_key.py | 20 ++--- 6 files changed, 183 insertions(+), 186 deletions(-) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index 9f724045a82b..ee5311f29c91 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -4,35 +4,23 @@ """ -class _MetadataMixin(object): - """Abstract mixin for cloud storage classes with associated metadata. +class _PropertyMixin(object): + """Abstract mixin for cloud storage classes with associated properties. Non-abstract subclasses should implement: - - CUSTOM_METADATA_FIELDS + - CUSTOM_PROPERTY_ACCESSORS - connection - path """ - CUSTOM_METADATA_FIELDS = None + CUSTOM_PROPERTY_ACCESSORS = None """Mapping of field name -> accessor for fields w/ custom accessors. Expected to be set by subclasses. Fields in this mapping will cause - `get_metadata()` to raise a KeyError with a message to use the relevant - accessor methods. + :meth:`_get_property()` to raise a KeyError with a message to use the + relevant accessor methods. """ - def __init__(self, name=None, metadata=None): - """_MetadataMixin constructor. - - :type name: string - :param name: The name of the object. - - :type metadata: dict - :param metadata: All the other data provided by Cloud Storage. - """ - self.name = name - self.metadata = metadata - @property def connection(self): """Abstract getter for the connection to use.""" @@ -43,90 +31,112 @@ def path(self): """Abstract getter for the object path.""" raise NotImplementedError - def has_metadata(self, field=None): - """Check if metadata is available. + def __init__(self, name=None, properties=None): + """_PropertyMixin constructor. - :type field: string - :param field: (optional) the particular field to check for. + :type name: string + :param name: The name of the object. + + :type properties: dict + :param properties: All the other data provided by Cloud Storage. + """ + self.name = name + self._properties = {} + if properties is not None: + self._properties.update(properties) - :rtype: bool - :returns: Whether metadata is available locally. + @property + def properties(self): + """Ensure properties are loaded, and return a copy. """ - if not self.metadata: - return False - elif field and field not in self.metadata: - return False - else: - return True + if not self._properties: + self._reload_properties() + return self._properties.copy() - def reload_metadata(self): - """Reload metadata from Cloud Storage.
+ metadata = properties # Backward-compatibility alias + def _reload_properties(self): + """Reload properties from Cloud Storage. + + :rtype: :class:`_PropertyMixin` + :returns: The object you just reloaded data for. """ # Pass only '?projection=noAcl' here because 'acl' and related # are handled via 'get_acl()' etc. query_params = {'projection': 'noAcl'} - self.metadata = self.connection.api_request( + self._properties = self.connection.api_request( method='GET', path=self.path, query_params=query_params) return self + reload_metadata = _reload_properties # backward-compat alias + + def _patch_properties(self, properties): + """Update particular fields of this object's properties. + + This method will only update the fields provided and will not + touch the other fields. + + It will also reload the properties locally based on the server's + response. - def get_metadata(self, field=None, default=None): - """Get all metadata or a specific field. + :type properties: dict + :param properties: The dictionary of values to update. + + :rtype: :class:`_PropertyMixin` + :returns: The current object. + """ + # Pass '?projection=full' here because 'PATCH' documented not + # to work properly w/ 'noAcl'. + self._properties = self.connection.api_request( + method='PATCH', path=self.path, data=properties, + query_params={'projection': 'full'}) + return self + patch_metadata = _patch_properties # backward-compat alias + + def _has_property(self, field=None): + """Check if property is available. + + :type field: string + :param field: (optional) the particular field to check for. + + :rtype: boolean + :returns: Whether property is available locally. If no ``field`` + passed, return whether *any* properties are available. + """ + if field and field not in self._properties: + return False + return len(self._properties) > 0 + has_metadata = _has_property # backward-compat alias + + def _get_property(self, field, default=None): + """Return the value of a field from the server-side representation. If you request a field that isn't available, and that field can be retrieved by refreshing data from Cloud Storage, this method - will reload the data using :func:`_MetadataMixin.reload_metadata`. + will reload the data using :func:`_PropertyMixin._reload_properties`. :type field: string - :param field: (optional) A particular field to retrieve from metadata. + :param field: A particular field to retrieve from properties. :type default: anything :param default: The value to return if the field provided wasn't found. - :rtype: dict or anything - :returns: All metadata or the value of the specific field. - - :raises: :class:`KeyError` if the field is in CUSTOM_METADATA_FIELDS. + :rtype: anything + :returns: value of the specific field, or the default if not found. """ - # We ignore 'acl' and related fields because they are meant to be - # handled via 'get_acl()' and related methods. - custom = self.CUSTOM_METADATA_FIELDS.get(field) + # Raise for fields which have custom accessors. + custom = self.CUSTOM_PROPERTY_ACCESSORS.get(field) if custom is not None: message = 'Use %s or related methods instead.' % custom raise KeyError((field, message)) - if not self.has_metadata(field=field): - self.reload_metadata() + if not self._properties or field not in self._properties: + self._reload_properties() - if field: - return self.metadata.get(field, default) - else: - return self.metadata - - def patch_metadata(self, metadata): - """Update particular fields of this object's metadata.
- - This method will only update the fields provided and will not - touch the other fields. - - It will also reload the metadata locally based on the server's - response. - - :type metadata: dict - :param metadata: The dictionary of values to update. - - :rtype: :class:`_MetadataMixin` - :returns: The current object. - """ - self.metadata = self.connection.api_request( - method='PATCH', path=self.path, data=metadata, - query_params={'projection': 'full'}) - return self + return self._properties.get(field, default) + get_metadata = _get_property # Backward-compat alias def get_acl(self): - """Get ACL metadata as an object. + """Get ACL as an object. :returns: An ACL object for the current object. """ diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e99680e257ec..4fb7d63bf000 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -2,7 +2,7 @@ import os -from gcloud.storage._helpers import _MetadataMixin +from gcloud.storage._helpers import _PropertyMixin from gcloud.storage import exceptions from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL @@ -11,7 +11,7 @@ from gcloud.storage.key import _KeyIterator -class Bucket(_MetadataMixin): +class Bucket(_PropertyMixin): """A class representing a Bucket on Cloud Storage. :type connection: :class:`gcloud.storage.connection.Connection` @@ -21,18 +21,18 @@ class Bucket(_MetadataMixin): :param name: The name of the bucket. """ - CUSTOM_METADATA_FIELDS = { + CUSTOM_PROPERTY_ACCESSORS = { 'acl': 'get_acl', 'defaultObjectAcl': 'get_default_object_acl', 'lifecycle': 'get_lifecycle', } - """Mapping of field name -> accessor for fields w/ custom accessors.""" + """Map field name -> accessor for fields w/ custom accessors.""" # ACL rules are lazily retrieved. _acl = _default_object_acl = None - def __init__(self, connection=None, name=None, metadata=None): - super(Bucket, self).__init__(name=name, metadata=metadata) + def __init__(self, connection=None, name=None, properties=None): + super(Bucket, self).__init__(name=name, properties=properties) self._connection = connection @property @@ -60,7 +60,7 @@ def from_dict(cls, bucket_dict, connection=None): :returns: A bucket constructed from the data provided. """ return cls(connection=connection, name=bucket_dict['name'], - metadata=bucket_dict) + properties=bucket_dict) def __repr__(self): return '' % self.name @@ -120,7 +120,7 @@ def get_all_keys(self): """List all the keys in this bucket. This will **not** retrieve all the data for all the keys, it - will only retrieve metadata about the keys. + will only retrieve the keys. This is equivalent to:: @@ -344,7 +344,7 @@ def upload_file_object(self, file_obj, key=None): return key.upload_from_file(file_obj) def configure_website(self, main_page_suffix=None, not_found_page=None): - """Configure website-related metadata. + """Configure website-related properties. .. note:: This (apparently) only works @@ -385,7 +385,7 @@ def configure_website(self, main_page_suffix=None, not_found_page=None): 'notFoundPage': not_found_page, }, } - return self.patch_metadata(data) + return self._patch_properties(data) def disable_website(self): """Disable the website configuration for this bucket. @@ -395,21 +395,11 @@ def disable_website(self): """ return self.configure_website(None, None) - def get_acl(self): - """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object. - - :rtype: :class:`gcloud.storage.acl.BucketACL` - :returns: An ACL object for the current bucket. 
- """ - if not self.acl.loaded: - self.acl.reload() - return self.acl - def get_default_object_acl(self): """Get the current Default Object ACL rules. - If the appropriate metadata isn't available locally, this method - will reload it from Cloud Storage. + If the acl isn't available locally, this method will reload it from + Cloud Storage. :rtype: :class:`gcloud.storage.acl.DefaultObjectACL` :returns: A DefaultObjectACL object for this bucket. @@ -451,10 +441,10 @@ def get_lifecycle(self): :rtype: list(dict) :returns: A sequence of mappings describing each CORS policy. """ - if not self.has_metadata('lifecycle'): - self.reload_metadata() + if not self._has_property('lifecycle'): + self._reload_properties() result = [] - info = self.metadata.get('lifecycle', {}) + info = self._properties.get('lifecycle', {}) for rule in info.get('rule', ()): rule = rule.copy() result.append(rule) @@ -469,7 +459,7 @@ def update_lifecycle(self, rules): :type rules: list(dict) :param rules: A sequence of mappings describing each lifecycle policy. """ - self.patch_metadata({'lifecycle': {'rule': rules}}) + self._patch_properties({'lifecycle': {'rule': rules}}) class BucketIterator(Iterator): diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index cac96b2471f9..b87a7f711d01 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -4,19 +4,19 @@ import os from StringIO import StringIO -from gcloud.storage._helpers import _MetadataMixin +from gcloud.storage._helpers import _PropertyMixin from gcloud.storage.acl import ObjectACL from gcloud.storage.exceptions import StorageError from gcloud.storage.iterator import Iterator -class Key(_MetadataMixin): +class Key(_PropertyMixin): """A wrapper around Cloud Storage's concept of an ``Object``.""" - CUSTOM_METADATA_FIELDS = { + CUSTOM_PROPERTY_ACCESSORS = { 'acl': 'get_acl', } - """Mapping of field name -> accessor for fields w/ custom accessors.""" + """Map field name -> accessor for fields w/ custom accessors.""" CHUNK_SIZE = 1024 * 1024 # 1 MB. """The size of a chunk of data whenever iterating (1 MB). @@ -26,7 +26,7 @@ class Key(_MetadataMixin): # ACL rules are lazily retrieved. _acl = None - def __init__(self, bucket=None, name=None, metadata=None): + def __init__(self, bucket=None, name=None, properties=None): """Key constructor. :type bucket: :class:`gcloud.storage.bucket.Bucket` @@ -36,10 +36,10 @@ def __init__(self, bucket=None, name=None, metadata=None): :param name: The name of the key. This corresponds to the unique path of the object in the bucket. - :type metadata: dict - :param metadata: All the other data provided by Cloud Storage. + :type properties: dict + :param properties: All the other data provided by Cloud Storage. """ - super(Key, self).__init__(name=name, metadata=metadata or {}) + super(Key, self).__init__(name=name, properties=properties) self.bucket = bucket @property @@ -65,7 +65,7 @@ def from_dict(cls, key_dict, bucket=None): :returns: A key based on the data provided. 
""" - return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict) + return cls(bucket=bucket, name=key_dict['name'], properties=key_dict) def __repr__(self): if self.bucket: diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index 7a2c7755eefd..100d48e4ecd2 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -1,11 +1,11 @@ import unittest2 -class Test_MetadataMixin(unittest2.TestCase): +class Test_PropertyMixin(unittest2.TestCase): def _getTargetClass(self): - from gcloud.storage._helpers import _MetadataMixin - return _MetadataMixin + from gcloud.storage._helpers import _PropertyMixin + return _PropertyMixin def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) @@ -13,7 +13,7 @@ def _makeOne(self, *args, **kw): def _derivedClass(self, connection=None, path=None, **custom_fields): class Derived(self._getTargetClass()): - CUSTOM_METADATA_FIELDS = custom_fields + CUSTOM_PROPERTY_ACCESSORS = custom_fields @property def connection(self): @@ -33,64 +33,71 @@ def test_path_is_abstract(self): mixin = self._makeOne() self.assertRaises(NotImplementedError, lambda: mixin.path) - def test_has_metadata_not_loaded(self): + def test__reload_properties(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + derived._reload_properties() + self.assertEqual(derived._properties, {'foo': 'Foo'}) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + + def test__has_property_not_loaded(self): mixin = self._makeOne() - self.assertEqual(mixin.has_metadata('nonesuch'), False) + self.assertEqual(mixin._has_property('nonesuch'), False) - def test_has_metadata_loaded_no_field(self): - mixin = self._makeOne(metadata={'foo': 'Foo'}) - self.assertEqual(mixin.has_metadata(), True) + def test__has_property_loaded_no_field(self): + mixin = self._makeOne(properties={'foo': 'Foo'}) + self.assertEqual(mixin._has_property(), True) - def test_has_metadata_loaded_miss(self): - mixin = self._makeOne(metadata={'foo': 'Foo'}) - self.assertEqual(mixin.has_metadata('nonesuch'), False) + def test__has_property_loaded_miss(self): + mixin = self._makeOne(properties={'foo': 'Foo'}) + self.assertEqual(mixin._has_property('nonesuch'), False) - def test_has_metadata_loaded_hit(self): - mixin = self._makeOne(metadata={'extant': False}) - self.assertEqual(mixin.has_metadata('extant'), True) + def test__has_property_loaded_hit(self): + mixin = self._makeOne(properties={'extant': False}) + self.assertEqual(mixin._has_property('extant'), True) - def test_reload_metadata(self): + def test__get_property_eager_hit(self): + derived = self._derivedClass()(properties={'foo': 'Foo'}) + self.assertEqual(derived._get_property('foo'), 'Foo') + + def test__get_property_eager_miss_w_default(self): connection = _Connection({'foo': 'Foo'}) derived = self._derivedClass(connection, '/path')() - derived.reload_metadata() - self.assertEqual(derived.metadata, {'foo': 'Foo'}) + default = object() + self.assertTrue(derived._get_property('nonesuch', default) is default) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/path') self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - def test_get_metadata_eager_no_field(self): - derived = self._derivedClass()(metadata={'extant': 
False}) - self.assertEqual(derived.get_metadata(), {'extant': False}) - - def test_get_metadata_eager_hit(self): - derived = self._derivedClass()(metadata={'foo': 'Foo'}) - self.assertEqual(derived.get_metadata('foo'), 'Foo') - - def test_get_metadata_lazy_hit(self): + def test__get_property_lazy_hit(self): connection = _Connection({'foo': 'Foo'}) derived = self._derivedClass(connection, '/path')() - self.assertEqual(derived.get_metadata('foo'), 'Foo') + self.assertTrue(derived._get_property('nonesuch') is None) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/path') self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - def test_get_metadata_w_custom_field(self): + def test__get_property_w_custom_field(self): derived = self._derivedClass(foo='get_foo')() try: - derived.get_metadata('foo') + derived._get_property('foo') except KeyError as e: self.assertTrue('get_foo' in str(e)) else: # pragma: NO COVER self.assert_('KeyError not raised') - def test_patch_metadata(self): + def test__patch_properties(self): connection = _Connection({'foo': 'Foo'}) derived = self._derivedClass(connection, '/path')() - self.assertTrue(derived.patch_metadata({'foo': 'Foo'}) is derived) + self.assertTrue(derived._patch_properties({'foo': 'Foo'}) is derived) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -98,6 +105,20 @@ def test_patch_metadata(self): self.assertEqual(kw[0]['data'], {'foo': 'Foo'}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + def test_properties_eager(self): + derived = self._derivedClass()(properties={'extant': False}) + self.assertEqual(derived.properties, {'extant': False}) + + def test_properties_lazy(self): + connection = _Connection({'foo': 'Foo'}) + derived = self._derivedClass(connection, '/path')() + self.assertEqual(derived.properties, {'foo': 'Foo'}) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/path') + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) + def test_get_acl_not_yet_loaded(self): class ACL(object): loaded = False diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 297dbc6082a3..5c172e600a98 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -16,41 +16,41 @@ def test_ctor_defaults(self): bucket = self._makeOne() self.assertEqual(bucket.connection, None) self.assertEqual(bucket.name, None) - self.assertEqual(bucket.metadata, None) + self.assertEqual(bucket._properties, {}) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_ctor_explicit(self): NAME = 'name' connection = _Connection() - metadata = {'key': 'value'} - bucket = self._makeOne(connection, NAME, metadata) + properties = {'key': 'value'} + bucket = self._makeOne(connection, NAME, properties) self.assertTrue(bucket.connection is connection) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket._properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_from_dict_defaults(self): NAME = 'name' - metadata = {'key': 'value', 'name': NAME} + properties = {'key': 'value', 'name': NAME} klass = self._getTargetClass() - bucket = klass.from_dict(metadata) + bucket = klass.from_dict(properties) self.assertEqual(bucket.connection, 
None) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket.properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) def test_from_dict_explicit(self): NAME = 'name' connection = _Connection() - metadata = {'key': 'value', 'name': NAME} + properties = {'key': 'value', 'name': NAME} klass = self._getTargetClass() - bucket = klass.from_dict(metadata, connection) + bucket = klass.from_dict(properties, connection) self.assertTrue(bucket.connection is connection) self.assertEqual(bucket.name, NAME) - self.assertEqual(bucket.metadata, metadata) + self.assertEqual(bucket.properties, properties) self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) @@ -413,7 +413,7 @@ def test_configure_website_defaults(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.configure_website() is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -428,7 +428,7 @@ def test_configure_website_explicit(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.configure_website('html', '404.html') is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -443,7 +443,7 @@ def test_disable_website(self): connection = _Connection(patched) bucket = self._makeOne(connection, NAME) self.assertTrue(bucket.disable_website() is bucket) - self.assertEqual(bucket.metadata, patched) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') @@ -451,30 +451,6 @@ def test_disable_website(self): self.assertEqual(kw[0]['data'], patched) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_get_acl_lazy(self): - from gcloud.storage.acl import BucketACL - NAME = 'name' - connection = _Connection({'items': []}) - bucket = self._makeOne(connection, NAME) - acl = bucket.get_acl() - self.assertTrue(acl is bucket.acl) - self.assertTrue(isinstance(acl, BucketACL)) - self.assertEqual(list(bucket.acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s/acl' % NAME) - - def test_get_acl_eager(self): - connection = _Connection() - bucket = self._makeOne() - preset = bucket.acl # Ensure it is assigned - preset.loaded = True - acl = bucket.get_acl() - self.assertTrue(acl is preset) - kw = connection._requested - self.assertEqual(len(kw), 0) - def test_get_default_object_acl_lazy(self): from gcloud.storage.acl import BucketACL NAME = 'name' diff --git a/gcloud/storage/test_key.py b/gcloud/storage/test_key.py index 481f7ce0d3a8..7fdeb2733efe 100644 --- a/gcloud/storage/test_key.py +++ b/gcloud/storage/test_key.py @@ -15,43 +15,43 @@ def test_ctor_defaults(self): self.assertEqual(key.bucket, None) self.assertEqual(key.connection, None) self.assertEqual(key.name, None) - self.assertEqual(key.metadata, {}) + self.assertEqual(key._properties, {}) self.assertTrue(key._acl is None) def test_ctor_explicit(self): KEY = 'key' connection = _Connection() bucket = _Bucket(connection) - metadata = {'key': 'value'} - key = 
self._makeOne(bucket, KEY, metadata) + properties = {'key': 'value'} + key = self._makeOne(bucket, KEY, properties) self.assertTrue(key.bucket is bucket) self.assertTrue(key.connection is connection) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_from_dict_defaults(self): KEY = 'key' - metadata = {'key': 'value', 'name': KEY} + properties = {'key': 'value', 'name': KEY} klass = self._getTargetClass() - key = klass.from_dict(metadata) + key = klass.from_dict(properties) self.assertEqual(key.bucket, None) self.assertEqual(key.connection, None) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_from_dict_explicit(self): KEY = 'key' connection = _Connection() bucket = _Bucket(connection) - metadata = {'key': 'value', 'name': KEY} + properties = {'key': 'value', 'name': KEY} klass = self._getTargetClass() - key = klass.from_dict(metadata, bucket) + key = klass.from_dict(properties, bucket) self.assertTrue(key.bucket is bucket) self.assertTrue(key.connection is connection) self.assertEqual(key.name, KEY) - self.assertEqual(key.metadata, metadata) + self.assertEqual(key.properties, properties) self.assertTrue(key._acl is None) def test_acl_property(self): From 79e0b05dffa22a1919f2e35ed1e43e492aeba38b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 15:55:41 -0500 Subject: [PATCH 09/20] Wrap method / attr name in single quotes. --- gcloud/storage/_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index ee5311f29c91..023655c3e6d9 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -126,7 +126,7 @@ def _get_property(self, field, default=None): # Raise for fields which have custom accessors. custom = self.CUSTOM_PROPERTY_ACCESSORS.get(field) if custom is not None: - message = 'Use %s or related methods instead.' % custom + message = "Use '%s' or related methods instead." % custom raise KeyError((field, message)) if not self._properties or field not in self._properties: From 753389991b3849030e2b3df3853715603c134906 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 15:56:25 -0500 Subject: [PATCH 10/20] Add read-only properties for non-writable bucket fields. Addresses last part of #314. 
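
Once loaded, these read as plain attributes. A quick doctest-style
sketch (the bucket name and returned values are illustrative only,
not captured output):

    >>> bucket = connection.get_bucket('my-bucket')
    >>> bucket.metageneration
    42
    >>> bucket.storage_class
    'STANDARD'
    >>> bucket.time_created
    '2014-11-05T20:34:37Z'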
---
 gcloud/storage/bucket.py | 109 +++++++++++++++++++++++++++++++++-
 gcloud/storage/key.py | 2 +-
 gcloud/storage/test_bucket.py | 50 ++++++++++++++++
 3 files changed, 157 insertions(+), 4 deletions(-)
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index 4fb7d63bf000..83f53d8cc9b9 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -22,9 +22,18 @@ class Bucket(_PropertyMixin):
 """
 CUSTOM_PROPERTY_ACCESSORS = {
- 'acl': 'get_acl',
- 'defaultObjectAcl': 'get_default_object_acl',
- 'lifecycle': 'get_lifecycle',
+ 'acl': 'get_acl()',
+ 'defaultObjectAcl': 'get_default_object_acl()',
+ 'lifecycle': 'get_lifecycle()',
+ 'etag': 'etag',
+ 'id': 'id',
+ 'metageneration': 'metageneration',
+ 'name': 'name',
+ 'owner': 'owner',
+ 'projectNumber': 'project_number',
+ 'selfLink': 'self_link',
+ 'storageClass': 'storage_class',
+ 'timeCreated': 'time_created',
 }
 """Map field name -> accessor for fields w/ custom accessors."""
@@ -461,6 +470,100 @@ def update_lifecycle(self, rules):
 """
 self._patch_properties({'lifecycle': {'rule': rules}})
+ @property
+ def etag(self):
+ """Retrieve the ETag for the bucket.
+
+ See: http://tools.ietf.org/html/rfc2616#section-3.11 and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string
+ :returns: a unique identifier for the bucket and current metadata.
+ """
+ return self.properties['etag']
+
+ @property
+ def id(self):
+ """Retrieve the ID for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string
+ :returns: a unique identifier for the bucket.
+ """
+ return self.properties['id']
+
+ @property
+ def metageneration(self):
+ """Retrieve the metageneration for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: integer
+ :returns: count of times the bucket's metadata has been updated
+ since creation.
+ """
+ return self.properties['metageneration']
+
+ @property
+ def owner(self):
+ """Retrieve info about the owner of the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: dict
+ :returns: mapping of owner's role/ID.
+ """
+ owner = self.properties['owner'].copy()
+ owner['id'] = owner.pop('entityId')
+ return owner
+
+ @property
+ def project_number(self):
+ """Retrieve the number of the project to which the bucket belongs.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: integer
+ :returns: the project number for the bucket.
+ """
+ return self.properties['projectNumber']
+
+ @property
+ def self_link(self):
+ """Retrieve the URI for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string
+ :returns: URI of the bucket.
+ """
+ return self.properties['selfLink']
+
+ @property
+ def storage_class(self):
+ """Retrieve the storage class for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets and
+ https://cloud.google.com/storage/docs/durable-reduced-availability#_DRA_Bucket
+
+ :rtype: string
+ :returns: the storage class for the bucket (currently one of
+ ``STANDARD``, ``DURABLE_REDUCED_AVAILABILITY``)
+ """
+ return self.properties['storageClass']
+
+ @property
+ def time_created(self):
+ """Retrieve the timestamp at which the bucket was created.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string
+ :returns: timestamp for the bucket's creation, in RFC 3339 format.
+ """
+ return self.properties['timeCreated']
+
 class BucketIterator(Iterator):
 """An iterator listing all buckets.
diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py index b87a7f711d01..935d71c98c7a 100644 --- a/gcloud/storage/key.py +++ b/gcloud/storage/key.py @@ -14,7 +14,7 @@ class Key(_PropertyMixin): """A wrapper around Cloud Storage's concept of an ``Object``.""" CUSTOM_PROPERTY_ACCESSORS = { - 'acl': 'get_acl', + 'acl': 'get_acl()', } """Map field name -> accessor for fields w/ custom accessors.""" diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 5c172e600a98..ca4ea40970cd 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -617,6 +617,56 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) + def test_etag(self): + ETAG = 'ETAG' + properties = {'etag': ETAG} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.etag, ETAG) + + def test_id(self): + ID = 'ID' + properties = {'id': ID} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.id, ID) + + def test_metageneration(self): + METAGENERATION = 42 + properties = {'metageneration': METAGENERATION} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.metageneration, METAGENERATION) + + def test_owner(self): + OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'} + properties = {'owner': OWNER} + bucket = self._makeOne(properties=properties) + owner = bucket.owner + self.assertEqual(owner['entity'], 'project-owner-12345') + self.assertEqual(owner['id'], '23456') + + def test_project_number(self): + PROJECT_NUMBER = 12345 + properties = {'projectNumber': PROJECT_NUMBER} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.project_number, PROJECT_NUMBER) + + def test_self_link(self): + SELF_LINK = 'http://example.com/self/' + properties = {'selfLink': SELF_LINK} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.self_link, SELF_LINK) + + def test_storage_class(self): + STORAGE_CLASS = 'http://example.com/self/' + properties = {'storageClass': STORAGE_CLASS} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.storage_class, STORAGE_CLASS) + + def test_time_created(self): + TIME_CREATED = '2014-11-05T20:34:37Z' + properties = {'timeCreated': TIME_CREATED} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.time_created, TIME_CREATED) + class TestBucketIterator(unittest2.TestCase): From c3fcb2ee021da4fc433576ac46fd594458e5cb45 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Nov 2014 16:42:38 -0500 Subject: [PATCH 11/20] Re-order methods to match field order. 
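
No behavior change is intended: accessors are simply re-grouped to follow
the order of the fields they wrap. For reference, the CORS accessors being
moved keep translating between the wire keys and their snake_case
equivalents; roughly (entry values are illustrative):

    bucket.update_cors([{
        'max_age': 1234,                # wire key 'maxAgeSeconds'
        'methods': ['OPTIONS', 'GET'],  # wire key 'method'
        'origins': ['127.0.0.1'],       # wire key 'origin'
        'headers': ['Content-Type'],    # wire key 'responseHeader'
    }])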
--- gcloud/storage/bucket.py | 372 +++++++++++----------- gcloud/storage/test_bucket.py | 584 +++++++++++++++++----------------- 2 files changed, 478 insertions(+), 478 deletions(-) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index e82176961c94..638a84f9ce8c 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -48,20 +48,6 @@ def __init__(self, connection=None, name=None, properties=None): super(Bucket, self).__init__(name=name, properties=properties) self._connection = connection - @property - def acl(self): - """Create our ACL on demand.""" - if self._acl is None: - self._acl = BucketACL(self) - return self._acl - - @property - def default_object_acl(self): - """Create our defaultObjectACL on demand.""" - if self._default_object_acl is None: - self._default_object_acl = DefaultObjectACL(self) - return self._default_object_acl - @classmethod def from_dict(cls, bucket_dict, connection=None): """Construct a new bucket from a dictionary of data from Cloud Storage. @@ -84,6 +70,20 @@ def __iter__(self): def __contains__(self, key): return self.get_key(key) is not None + @property + def acl(self): + """Create our ACL on demand.""" + if self._acl is None: + self._acl = BucketACL(self) + return self._acl + + @property + def default_object_acl(self): + """Create our defaultObjectACL on demand.""" + if self._default_object_acl is None: + self._default_object_acl = DefaultObjectACL(self) + return self._default_object_acl + @property def connection(self): """Getter property for the connection to use with this Bucket. @@ -356,57 +356,57 @@ def upload_file_object(self, file_obj, key=None): key = self.new_key(os.path.basename(file_obj.name)) return key.upload_from_file(file_obj) - def configure_website(self, main_page_suffix=None, not_found_page=None): - """Configure website-related properties. - - .. note:: - This (apparently) only works - if your bucket name is a domain name - (and to do that, you need to get approved somehow...). - - Check out the official documentation here: - https://developers.google.com/storage/docs/website-configuration - - If you want this bucket to host a website, just provide the name - of an index page and a page to use when a key isn't found:: - - >>> from gcloud import storage - >>> connection = storage.get_connection(project, email, - private_key_path) - >>> bucket = connection.get_bucket(bucket_name) - >>> bucket.configure_website('index.html', '404.html') - - You probably should also make the whole bucket public:: - - >>> bucket.make_public(recursive=True, future=True) - - This says: "Make the bucket public, and all the stuff already in - the bucket, and anything else I add to the bucket. Just make it - all public." + def get_cors(self): + """Retrieve CORS policies configured for this bucket. - :type main_page_suffix: string - :param main_page_suffix: The page to use as the main page - of a directory. - Typically something like index.html. + See: http://www.w3.org/TR/cors/ and + https://cloud.google.com/storage/docs/json_api/v1/buckets - :type not_found_page: string - :param not_found_page: The file to use when a page isn't found. + :rtype: list(dict) + :returns: A sequence of mappings describing each CORS policy. + Keys include 'max_age', 'methods', 'origins', and + 'headers'. 
""" - data = { - 'website': { - 'mainPageSuffix': main_page_suffix, - 'notFoundPage': not_found_page, - }, - } - return self._patch_properties(data) + if not self.has_metadata('cors'): + self.reload_metadata() + result = [] + for entry in self.metadata.get('cors', ()): + entry = entry.copy() + result.append(entry) + if 'maxAgeSeconds' in entry: + entry['max_age'] = entry.pop('maxAgeSeconds') + if 'method' in entry: + entry['methods'] = entry.pop('method') + if 'origin' in entry: + entry['origins'] = entry.pop('origin') + if 'responseHeader' in entry: + entry['headers'] = entry.pop('responseHeader') + return result - def disable_website(self): - """Disable the website configuration for this bucket. + def update_cors(self, entries): + """Update CORS policies configured for this bucket. - This is really just a shortcut for setting the website-related - attributes to ``None``. + See: http://www.w3.org/TR/cors/ and + https://cloud.google.com/storage/docs/json_api/v1/buckets + + :type entries: list(dict) + :param entries: A sequence of mappings describing each CORS policy. + Keys include 'max_age', 'methods', 'origins', and + 'headers'. """ - return self.configure_website(None, None) + to_patch = [] + for entry in entries: + entry = entry.copy() + to_patch.append(entry) + if 'max_age' in entry: + entry['maxAgeSeconds'] = entry.pop('max_age') + if 'methods' in entry: + entry['method'] = entry.pop('methods') + if 'origins' in entry: + entry['origin'] = entry.pop('origins') + if 'headers' in entry: + entry['responseHeader'] = entry.pop('headers') + self.patch_metadata({'cors': to_patch}) def get_default_object_acl(self): """Get the current Default Object ACL rules. @@ -421,29 +421,28 @@ def get_default_object_acl(self): self.default_object_acl.reload() return self.default_object_acl - def make_public(self, recursive=False, future=False): - """Make a bucket public. + @property + def etag(self): + """Retrieve the ETag for the bucket. - :type recursive: bool - :param recursive: If True, this will make all keys inside the bucket - public as well. + See: http://tools.ietf.org/html/rfc2616#section-3.11 and + https://cloud.google.com/storage/docs/json_api/v1/buckets - :type future: bool - :param future: If True, this will make all objects created in the - future public as well. + :rtype: string + :returns: a unique identifier for the bucket and current metadata. """ - self.get_acl().all().grant_read() - self.acl.save() + return self.properties['etag'] - if future: - doa = self.get_default_object_acl() - doa.all().grant_read() - doa.save() + @property + def id(self): + """Retrieve the ID for the bucket. - if recursive: - for key in self: - key.get_acl().all().grant_read() - key.save_acl() + See: https://cloud.google.com/storage/docs/json_api/v1/buckets + + :rtype: string + :returns: a unique identifier for the bucket. + """ + return self.properties['id'] def get_lifecycle(self): """Retrieve lifecycle rules configured for this bucket. @@ -474,28 +473,68 @@ def update_lifecycle(self, rules): """ self._patch_properties({'lifecycle': {'rule': rules}}) - @property - def etag(self): - """Retrieve the ETag for the bucket. + def get_location(self): + """Retrieve location configured for this bucket. 
- See: http://tools.ietf.org/html/rfc2616#section-3.11 and - https://cloud.google.com/storage/docs/json_api/v1/buckets + See: https://cloud.google.com/storage/docs/json_api/v1/buckets and + https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations :rtype: string - :returns: a unique identifier for the bucket and current metadata. + :returns: The configured location. """ - return self.properties['etag'] + if not self.has_metadata('location'): + self.reload_metadata() + return self.metadata.get('location') - @property - def id(self): - """Retrieve the ID for the bucket. + def set_location(self, location): + """Update location configured for this bucket. - See: https://cloud.google.com/storage/docs/json_api/v1/buckets + See: https://cloud.google.com/storage/docs/json_api/v1/buckets and + https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations - :rtype: string - :returns: a unique identifier for the bucket. + :type location: string + :param location: The new configured location. """ - return self.properties['id'] + self.patch_metadata({'location': location}) + + def get_logging(self): + """Return info about access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#status + + :rtype: dict or None + :returns: a dict w/ keys, ``bucket_name`` and ``object_prefix`` + (if logging is enabled), or None (if not). + """ + if not self.has_metadata('logging'): + self.reload_metadata() + info = self.metadata.get('logging') + if info is not None: + info = info.copy() + info['bucket_name'] = info.pop('logBucket') + info['object_prefix'] = info.pop('logObjectPrefix', '') + return info + + def enable_logging(self, bucket_name, object_prefix=''): + """Enable access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#delivery + + :type bucket_name: string + :param bucket_name: name of bucket in which to store access logs + + :type object_prefix: string + :param object_prefix: prefix for access log filenames + """ + info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix} + self.patch_metadata({'logging': info}) + + def disable_logging(self): + """Disable access logging for this bucket. + + See: https://cloud.google.com/storage/docs/accesslogs#disabling + """ + self.patch_metadata({'logging': None}) @property def metageneration(self): @@ -553,7 +592,7 @@ def storage_class(self): :rtype: string :returns: the storage class for the bucket (currently one of - ``STANDARD``, ``DURABLE_REDUCED_AVAILABILITY``) + ``STANDARD``, ``DURABLE_REDUCED_AVAILABILITY``) """ return self.properties['storageClass'] @@ -598,120 +637,81 @@ def disable_versioning(self): """ self.patch_metadata({'versioning': {'enabled': False}}) - def get_logging(self): - """Return info about access logging for this bucket. - - See: https://cloud.google.com/storage/docs/accesslogs#status - :rtype: dict or None - :returns: a dict w/ keys, ``bucket_name`` and ``object_prefix`` - (if logging is enabled), or None (if not). - """ - if not self.has_metadata('logging'): - self.reload_metadata() - info = self.metadata.get('logging') - if info is not None: - info = info.copy() - info['bucket_name'] = info.pop('logBucket') - info['object_prefix'] = info.pop('logObjectPrefix', '') - return info + def configure_website(self, main_page_suffix=None, not_found_page=None): + """Configure website-related properties. - def enable_logging(self, bucket_name, object_prefix=''): - """Enable access logging for this bucket. 
+ See: https://developers.google.com/storage/docs/website-configuration - See: https://cloud.google.com/storage/docs/accesslogs#delivery + .. note:: + This (apparently) only works + if your bucket name is a domain name + (and to do that, you need to get approved somehow...). - :type bucket_name: string - :param bucket_name: name of bucket in which to store access logs + If you want this bucket to host a website, just provide the name + of an index page and a page to use when a key isn't found:: - :type object_prefix: string - :param object_prefix: prefix for access log filenames - """ - info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix} - self.patch_metadata({'logging': info}) + >>> from gcloud import storage + >>> connection = storage.get_connection(project, email, + private_key_path) + >>> bucket = connection.get_bucket(bucket_name) + >>> bucket.configure_website('index.html', '404.html') - def disable_logging(self): - """Disable access logging for this bucket. + You probably should also make the whole bucket public:: - See: https://cloud.google.com/storage/docs/accesslogs#disabling - """ - self.patch_metadata({'logging': None}) + >>> bucket.make_public(recursive=True, future=True) - def get_cors(self): - """Retrieve CORS policies configured for this bucket. + This says: "Make the bucket public, and all the stuff already in + the bucket, and anything else I add to the bucket. Just make it + all public." - See: http://www.w3.org/TR/cors/ and - https://cloud.google.com/storage/docs/json_api/v1/buckets + :type main_page_suffix: string + :param main_page_suffix: The page to use as the main page + of a directory. + Typically something like index.html. - :rtype: list(dict) - :returns: A sequence of mappings describing each CORS policy. - Keys include 'max_age', 'methods', 'origins', and - 'headers'. + :type not_found_page: string + :param not_found_page: The file to use when a page isn't found. """ - if not self.has_metadata('cors'): - self.reload_metadata() - result = [] - for entry in self.metadata.get('cors', ()): - entry = entry.copy() - result.append(entry) - if 'maxAgeSeconds' in entry: - entry['max_age'] = entry.pop('maxAgeSeconds') - if 'method' in entry: - entry['methods'] = entry.pop('method') - if 'origin' in entry: - entry['origins'] = entry.pop('origin') - if 'responseHeader' in entry: - entry['headers'] = entry.pop('responseHeader') - return result - - def update_cors(self, entries): - """Update CORS policies configured for this bucket. + data = { + 'website': { + 'mainPageSuffix': main_page_suffix, + 'notFoundPage': not_found_page, + }, + } + return self._patch_properties(data) - See: http://www.w3.org/TR/cors/ and - https://cloud.google.com/storage/docs/json_api/v1/buckets + def disable_website(self): + """Disable the website configuration for this bucket. - :type entries: list(dict) - :param entries: A sequence of mappings describing each CORS policy. - Keys include 'max_age', 'methods', 'origins', and - 'headers'. + This is really just a shortcut for setting the website-related + attributes to ``None``. 
""" - to_patch = [] - for entry in entries: - entry = entry.copy() - to_patch.append(entry) - if 'max_age' in entry: - entry['maxAgeSeconds'] = entry.pop('max_age') - if 'methods' in entry: - entry['method'] = entry.pop('methods') - if 'origins' in entry: - entry['origin'] = entry.pop('origins') - if 'headers' in entry: - entry['responseHeader'] = entry.pop('headers') - self.patch_metadata({'cors': to_patch}) + return self.configure_website(None, None) - def get_location(self): - """Retrieve location configured for this bucket. + def make_public(self, recursive=False, future=False): + """Make a bucket public. - See: https://cloud.google.com/storage/docs/json_api/v1/buckets and - https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations + :type recursive: bool + :param recursive: If True, this will make all keys inside the bucket + public as well. - :rtype: string - :returns: The configured location. + :type future: bool + :param future: If True, this will make all objects created in the + future public as well. """ - if not self.has_metadata('location'): - self.reload_metadata() - return self.metadata.get('location') - - def set_location(self, location): - """Update location configured for this bucket. + self.get_acl().all().grant_read() + self.acl.save() - See: https://cloud.google.com/storage/docs/json_api/v1/buckets and - https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations + if future: + doa = self.get_default_object_acl() + doa.all().grant_read() + doa.save() - :type location: string - :param location: The new configured location. - """ - self.patch_metadata({'location': location}) + if recursive: + for key in self: + key.get_acl().all().grant_read() + key.save_acl() class BucketIterator(Iterator): diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 69331f8acb6e..1482790d09d1 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -54,20 +54,6 @@ def test_from_dict_explicit(self): self.assertTrue(bucket._acl is None) self.assertTrue(bucket._default_object_acl is None) - def test_acl_property(self): - from gcloud.storage.acl import BucketACL - bucket = self._makeOne() - acl = bucket.acl - self.assertTrue(isinstance(acl, BucketACL)) - self.assertTrue(acl is bucket._acl) - - def test_default_object_acl_property(self): - from gcloud.storage.acl import DefaultObjectACL - bucket = self._makeOne() - acl = bucket.default_object_acl - self.assertTrue(isinstance(acl, DefaultObjectACL)) - self.assertTrue(acl is bucket._default_object_acl) - def test___iter___empty(self): NAME = 'name' connection = _Connection({'items': []}) @@ -113,6 +99,20 @@ def test___contains___hit(self): self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, KEY)) + def test_acl_property(self): + from gcloud.storage.acl import BucketACL + bucket = self._makeOne() + acl = bucket.acl + self.assertTrue(isinstance(acl, BucketACL)) + self.assertTrue(acl is bucket._acl) + + def test_default_object_acl_property(self): + from gcloud.storage.acl import DefaultObjectACL + bucket = self._makeOne() + acl = bucket.default_object_acl + self.assertTrue(isinstance(acl, DefaultObjectACL)) + self.assertTrue(acl is bucket._default_object_acl) + def test_path_no_name(self): bucket = self._makeOne() self.assertRaises(ValueError, getattr, bucket, 'path') @@ -406,50 +406,76 @@ def upload_from_file(self, fh): bucket.upload_file_object(FILEOBJECT, KEY) self.assertEqual(_uploaded, [(bucket, KEY, FILEOBJECT)]) - def 
test_configure_website_defaults(self): + def test_get_cors_eager(self): NAME = 'name' - patched = {'website': {'mainPageSuffix': None, - 'notFoundPage': None}} - connection = _Connection(patched) - bucket = self._makeOne(connection, NAME) - self.assertTrue(bucket.configure_website() is bucket) - self.assertEqual(bucket.properties, patched) + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': ['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + before = {'cors': [CORS_ENTRY, {}]} + connection = _Connection() + bucket = self._makeOne(connection, NAME, before) + entries = bucket.get_cors() + self.assertEqual(len(entries), 2) + self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) + self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) + self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + self.assertEqual(entries[1], {}) kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], patched) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(len(kw), 0) - def test_configure_website_explicit(self): + def test_get_cors_lazy(self): NAME = 'name' - patched = {'website': {'mainPageSuffix': 'html', - 'notFoundPage': '404.html'}} - connection = _Connection(patched) + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': ['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + after = {'cors': [CORS_ENTRY]} + connection = _Connection(after) bucket = self._makeOne(connection, NAME) - self.assertTrue(bucket.configure_website('html', '404.html') is bucket) - self.assertEqual(bucket.properties, patched) + entries = bucket.get_cors() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) + self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) + self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], patched) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - def test_disable_website(self): + def test_update_cors(self): NAME = 'name' - patched = {'website': {'mainPageSuffix': None, - 'notFoundPage': None}} - connection = _Connection(patched) + CORS_ENTRY = { + 'maxAgeSeconds': 1234, + 'method': ['OPTIONS', 'GET'], + 'origin': ['127.0.0.1'], + 'responseHeader': ['Content-Type'], + } + MAPPED = { + 'max_age': 1234, + 'methods': ['OPTIONS', 'GET'], + 'origins': ['127.0.0.1'], + 'headers': ['Content-Type'], + } + after = {'cors': [CORS_ENTRY, {}]} + connection = _Connection(after) bucket = self._makeOne(connection, NAME) - self.assertTrue(bucket.disable_website() is bucket) - self.assertEqual(bucket.properties, patched) + bucket.update_cors([MAPPED, {}]) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], patched) + self.assertEqual(kw[0]['data'], after) self.assertEqual(kw[0]['query_params'], {'projection': 
'full'}) + entries = bucket.get_cors() + self.assertEqual(entries, [MAPPED, {}]) def test_get_default_object_acl_lazy(self): from gcloud.storage.acl import BucketACL @@ -475,100 +501,17 @@ def test_get_default_object_acl_eager(self): kw = connection._requested self.assertEqual(len(kw), 0) - def test_make_public_defaults(self): - from gcloud.storage.acl import _ACLEntity - NAME = 'name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = {'acl': permissive, 'defaultObjectAcl': []} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME) - bucket.acl.loaded = True - bucket.make_public() - self.assertEqual(list(bucket.acl), permissive) - self.assertEqual(list(bucket.default_object_acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': after['acl']}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - - def test_make_public_w_future(self): - from gcloud.storage.acl import _ACLEntity - NAME = 'name' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after1 = {'acl': permissive, 'defaultObjectAcl': []} - after2 = {'acl': permissive, 'defaultObjectAcl': permissive} - connection = _Connection(after1, after2) - bucket = self._makeOne(connection, NAME) - bucket.acl.loaded = True - bucket.default_object_acl.loaded = True - bucket.make_public(future=True) - self.assertEqual(list(bucket.acl), permissive) - self.assertEqual(list(bucket.default_object_acl), permissive) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - self.assertEqual(kw[1]['method'], 'PATCH') - self.assertEqual(kw[1]['path'], '/b/%s' % NAME) - self.assertEqual(kw[1]['data'], {'defaultObjectAcl': permissive}) - self.assertEqual(kw[1]['query_params'], {'projection': 'full'}) - - def test_make_public_recursive(self): - from gcloud.storage.acl import _ACLEntity - from gcloud._testing import _Monkey - from gcloud.storage import key - from gcloud.storage import bucket as MUT - _saved = [] - - class _Key(object): - _granted = False - - def __init__(self, bucket, name): - self._bucket = bucket - self._name = name - - def get_acl(self): - return self - - def all(self): - return self - - def grant_read(self): - self._granted = True - - def save_acl(self): - _saved.append((self._bucket, self._name, self._granted)) - - class _KeyIterator(key._KeyIterator): - def get_items_from_response(self, response): - for item in response.get('items', []): - yield _Key(self.bucket, item['name']) + def test_etag(self): + ETAG = 'ETAG' + properties = {'etag': ETAG} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.etag, ETAG) - NAME = 'name' - KEY = 'key' - permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] - after = {'acl': permissive, 'defaultObjectAcl': []} - connection = _Connection(after, {'items': [{'name': KEY}]}) - bucket = self._makeOne(connection, NAME) - bucket.acl.loaded = True - with _Monkey(MUT, _KeyIterator=_KeyIterator): - bucket.make_public(recursive=True) - self.assertEqual(list(bucket.acl), permissive) - self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, KEY, True)]) - kw = connection._requested - 
self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'acl': permissive}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - self.assertEqual(kw[1]['method'], 'GET') - self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME) - self.assertEqual(kw[1]['query_params'], {}) + def test_id(self): + ID = 'ID' + properties = {'id': ID} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.id, ID) def test_get_lifecycle_eager(self): NAME = 'name' @@ -617,106 +560,36 @@ def test_update_lifecycle(self): self.assertEqual(entries[0]['action']['type'], 'Delete') self.assertEqual(entries[0]['condition']['age'], 42) - def test_etag(self): - ETAG = 'ETAG' - properties = {'etag': ETAG} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.etag, ETAG) - - def test_id(self): - ID = 'ID' - properties = {'id': ID} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.id, ID) - - def test_metageneration(self): - METAGENERATION = 42 - properties = {'metageneration': METAGENERATION} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.metageneration, METAGENERATION) + def test_get_location_eager(self): + NAME = 'name' + connection = _Connection() + before = {'location': 'AS'} + bucket = self._makeOne(connection, NAME, before) + self.assertEqual(bucket.get_location(), 'AS') + kw = connection._requested + self.assertEqual(len(kw), 0) - def test_owner(self): - OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'} - properties = {'owner': OWNER} - bucket = self._makeOne(properties=properties) - owner = bucket.owner - self.assertEqual(owner['entity'], 'project-owner-12345') - self.assertEqual(owner['id'], '23456') + def test_get_location_lazy(self): + NAME = 'name' + connection = _Connection({'location': 'AS'}) + bucket = self._makeOne(connection, NAME) + self.assertEqual(bucket.get_location(), 'AS') + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - def test_project_number(self): - PROJECT_NUMBER = 12345 - properties = {'projectNumber': PROJECT_NUMBER} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.project_number, PROJECT_NUMBER) - - def test_self_link(self): - SELF_LINK = 'http://example.com/self/' - properties = {'selfLink': SELF_LINK} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.self_link, SELF_LINK) - - def test_storage_class(self): - STORAGE_CLASS = 'http://example.com/self/' - properties = {'storageClass': STORAGE_CLASS} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.storage_class, STORAGE_CLASS) - - def test_time_created(self): - TIME_CREATED = '2014-11-05T20:34:37Z' - properties = {'timeCreated': TIME_CREATED} - bucket = self._makeOne(properties=properties) - self.assertEqual(bucket.time_created, TIME_CREATED) - - def test_get_versioning_eager(self): - NAME = 'name' - before = {'bar': 'Bar', 'versioning': {'enabled': True}} - connection = _Connection() - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_versioning(), True) - kw = connection._requested - self.assertEqual(len(kw), 0) - - def test_get_versioning_lazy(self): - NAME = 'name' - before = {'bar': 'Bar'} - after = {'bar': 'Bar', 'versioning': {'enabled': True}} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, 
before) - self.assertEqual(bucket.get_versioning(), True) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') - self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - - def test_enable_versioning(self): - NAME = 'name' - before = {'versioning': {'enabled': False}} - after = {'versioning': {'enabled': True}} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertFalse(bucket.get_versioning()) - bucket.enable_versioning() - self.assertTrue(bucket.get_versioning()) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['data'], {'versioning': {'enabled': True}}) - self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - - def test_disable_versioning(self): + def test_update_location(self): NAME = 'name' - before = {'versioning': {'enabled': True}} - after = {'versioning': {'enabled': False}} - connection = _Connection(after) - bucket = self._makeOne(connection, NAME, before) - self.assertTrue(bucket.get_versioning()) - bucket.disable_versioning() - self.assertFalse(bucket.get_versioning()) + connection = _Connection({'location': 'AS'}) + bucket = self._makeOne(connection, NAME) + bucket.set_location('AS') + self.assertEqual(bucket.get_location(), 'AS') kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') - self.assertEqual(kw[0]['data'], after) + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'location': 'AS'}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) def test_get_logging_eager_w_prefix(self): @@ -805,108 +678,235 @@ def test_disable_logging(self): self.assertEqual(kw[0]['data'], {'logging': None}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_get_cors_eager(self): + def test_metageneration(self): + METAGENERATION = 42 + properties = {'metageneration': METAGENERATION} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.metageneration, METAGENERATION) + + def test_owner(self): + OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'} + properties = {'owner': OWNER} + bucket = self._makeOne(properties=properties) + owner = bucket.owner + self.assertEqual(owner['entity'], 'project-owner-12345') + self.assertEqual(owner['id'], '23456') + + def test_project_number(self): + PROJECT_NUMBER = 12345 + properties = {'projectNumber': PROJECT_NUMBER} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.project_number, PROJECT_NUMBER) + + def test_self_link(self): + SELF_LINK = 'http://example.com/self/' + properties = {'selfLink': SELF_LINK} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.self_link, SELF_LINK) + + def test_storage_class(self): + STORAGE_CLASS = 'http://example.com/self/' + properties = {'storageClass': STORAGE_CLASS} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.storage_class, STORAGE_CLASS) + + def test_time_created(self): + TIME_CREATED = '2014-11-05T20:34:37Z' + properties = {'timeCreated': TIME_CREATED} + bucket = self._makeOne(properties=properties) + self.assertEqual(bucket.time_created, TIME_CREATED) + + def test_get_versioning_eager(self): NAME = 'name' - CORS_ENTRY = { - 'maxAgeSeconds': 1234, - 'method': ['OPTIONS', 'GET'], - 'origin': ['127.0.0.1'], - 'responseHeader': ['Content-Type'], - } - before = 
{'cors': [CORS_ENTRY, {}]} + before = {'bar': 'Bar', 'versioning': {'enabled': True}} connection = _Connection() bucket = self._makeOne(connection, NAME, before) - entries = bucket.get_cors() - self.assertEqual(len(entries), 2) - self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) - self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) - self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) - self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) - self.assertEqual(entries[1], {}) + self.assertEqual(bucket.get_versioning(), True) kw = connection._requested self.assertEqual(len(kw), 0) - def test_get_cors_lazy(self): + def test_get_versioning_lazy(self): NAME = 'name' - CORS_ENTRY = { - 'maxAgeSeconds': 1234, - 'method': ['OPTIONS', 'GET'], - 'origin': ['127.0.0.1'], - 'responseHeader': ['Content-Type'], - } - after = {'cors': [CORS_ENTRY]} + before = {'bar': 'Bar'} + after = {'bar': 'Bar', 'versioning': {'enabled': True}} connection = _Connection(after) - bucket = self._makeOne(connection, NAME) - entries = bucket.get_cors() - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) - self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) - self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) - self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + bucket = self._makeOne(connection, NAME, before) + self.assertEqual(bucket.get_versioning(), True) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'GET') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - def test_update_cors(self): + def test_enable_versioning(self): NAME = 'name' - CORS_ENTRY = { - 'maxAgeSeconds': 1234, - 'method': ['OPTIONS', 'GET'], - 'origin': ['127.0.0.1'], - 'responseHeader': ['Content-Type'], - } - MAPPED = { - 'max_age': 1234, - 'methods': ['OPTIONS', 'GET'], - 'origins': ['127.0.0.1'], - 'headers': ['Content-Type'], - } - after = {'cors': [CORS_ENTRY, {}]} + before = {'versioning': {'enabled': False}} + after = {'versioning': {'enabled': True}} connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertFalse(bucket.get_versioning()) + bucket.enable_versioning() + self.assertTrue(bucket.get_versioning()) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['data'], {'versioning': {'enabled': True}}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_disable_versioning(self): + NAME = 'name' + before = {'versioning': {'enabled': True}} + after = {'versioning': {'enabled': False}} + connection = _Connection(after) + bucket = self._makeOne(connection, NAME, before) + self.assertTrue(bucket.get_versioning()) + bucket.disable_versioning() + self.assertFalse(bucket.get_versioning()) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['data'], after) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_configure_website_defaults(self): + NAME = 'name' + patched = {'website': {'mainPageSuffix': None, + 'notFoundPage': None}} + connection = _Connection(patched) bucket = self._makeOne(connection, NAME) - bucket.update_cors([MAPPED, {}]) + self.assertTrue(bucket.configure_website() is bucket) + self.assertEqual(bucket.properties, patched) kw = 
connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], after) + self.assertEqual(kw[0]['data'], patched) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - entries = bucket.get_cors() - self.assertEqual(entries, [MAPPED, {}]) - def test_get_location_eager(self): + def test_configure_website_explicit(self): NAME = 'name' - connection = _Connection() - before = {'location': 'AS'} - bucket = self._makeOne(connection, NAME, before) - self.assertEqual(bucket.get_location(), 'AS') + patched = {'website': {'mainPageSuffix': 'html', + 'notFoundPage': '404.html'}} + connection = _Connection(patched) + bucket = self._makeOne(connection, NAME) + self.assertTrue(bucket.configure_website('html', '404.html') is bucket) + self.assertEqual(bucket.properties, patched) kw = connection._requested - self.assertEqual(len(kw), 0) + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], patched) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_get_location_lazy(self): + def test_disable_website(self): NAME = 'name' - connection = _Connection({'location': 'AS'}) + patched = {'website': {'mainPageSuffix': None, + 'notFoundPage': None}} + connection = _Connection(patched) bucket = self._makeOne(connection, NAME) - self.assertEqual(bucket.get_location(), 'AS') + self.assertTrue(bucket.disable_website() is bucket) + self.assertEqual(bucket.properties, patched) kw = connection._requested self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]['method'], 'GET') + self.assertEqual(kw[0]['method'], 'PATCH') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], patched) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) - def test_update_location(self): + def test_make_public_defaults(self): + from gcloud.storage.acl import _ACLEntity NAME = 'name' - connection = _Connection({'location': 'AS'}) + permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] + after = {'acl': permissive, 'defaultObjectAcl': []} + connection = _Connection(after) bucket = self._makeOne(connection, NAME) - bucket.set_location('AS') - self.assertEqual(bucket.get_location(), 'AS') + bucket.acl.loaded = True + bucket.make_public() + self.assertEqual(list(bucket.acl), permissive) + self.assertEqual(list(bucket.default_object_acl), []) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'PATCH') self.assertEqual(kw[0]['path'], '/b/%s' % NAME) - self.assertEqual(kw[0]['data'], {'location': 'AS'}) + self.assertEqual(kw[0]['data'], {'acl': after['acl']}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def test_make_public_w_future(self): + from gcloud.storage.acl import _ACLEntity + NAME = 'name' + permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] + after1 = {'acl': permissive, 'defaultObjectAcl': []} + after2 = {'acl': permissive, 'defaultObjectAcl': permissive} + connection = _Connection(after1, after2) + bucket = self._makeOne(connection, NAME) + bucket.acl.loaded = True + bucket.default_object_acl.loaded = True + bucket.make_public(future=True) + self.assertEqual(list(bucket.acl), permissive) + self.assertEqual(list(bucket.default_object_acl), permissive) + kw = connection._requested + self.assertEqual(len(kw), 2) + self.assertEqual(kw[0]['method'], 'PATCH') 
+ self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
+ self.assertEqual(kw[0]['data'], {'acl': permissive})
+ self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
+ self.assertEqual(kw[1]['method'], 'PATCH')
+ self.assertEqual(kw[1]['path'], '/b/%s' % NAME)
+ self.assertEqual(kw[1]['data'], {'defaultObjectAcl': permissive})
+ self.assertEqual(kw[1]['query_params'], {'projection': 'full'})
+
+ def test_make_public_recursive(self):
+ from gcloud.storage.acl import _ACLEntity
+ from gcloud._testing import _Monkey
+ from gcloud.storage import key
+ from gcloud.storage import bucket as MUT
+ _saved = []
+
+ class _Key(object):
+ _granted = False
+
+ def __init__(self, bucket, name):
+ self._bucket = bucket
+ self._name = name
+
+ def get_acl(self):
+ return self
+
+ def all(self):
+ return self
+
+ def grant_read(self):
+ self._granted = True
+
+ def save_acl(self):
+ _saved.append((self._bucket, self._name, self._granted))
+
+ class _KeyIterator(key._KeyIterator):
+ def get_items_from_response(self, response):
+ for item in response.get('items', []):
+ yield _Key(self.bucket, item['name'])
+
+ NAME = 'name'
+ KEY = 'key'
+ permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
+ after = {'acl': permissive, 'defaultObjectAcl': []}
+ connection = _Connection(after, {'items': [{'name': KEY}]})
+ bucket = self._makeOne(connection, NAME)
+ bucket.acl.loaded = True
+ with _Monkey(MUT, _KeyIterator=_KeyIterator):
+ bucket.make_public(recursive=True)
+ self.assertEqual(list(bucket.acl), permissive)
+ self.assertEqual(list(bucket.default_object_acl), [])
+ self.assertEqual(_saved, [(bucket, KEY, True)])
+ kw = connection._requested
+ self.assertEqual(len(kw), 2)
+ self.assertEqual(kw[0]['method'], 'PATCH')
+ self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
+ self.assertEqual(kw[0]['data'], {'acl': permissive})
self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
+ self.assertEqual(kw[1]['method'], 'GET')
+ self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME)
+ self.assertEqual(kw[1]['query_params'], {})
class TestBucketIterator(unittest2.TestCase):
From aa7afbb47ec1f865ce5017d13e04c02fba9942d0 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 5 Nov 2014 16:50:34 -0500
Subject: [PATCH 12/20] Make 'lazy' test less eager.
---
 gcloud/storage/test_bucket.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index 1482790d09d1..c2657cbb2c6b 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -727,10 +727,9 @@ def test_get_versioning_eager(self):
 def test_get_versioning_lazy(self):
 NAME = 'name'
- before = {'bar': 'Bar'}
 after = {'bar': 'Bar', 'versioning': {'enabled': True}}
 connection = _Connection(after)
- bucket = self._makeOne(connection, NAME, before)
+ bucket = self._makeOne(connection, NAME)
 self.assertEqual(bucket.get_versioning(), True)
 kw = connection._requested
 self.assertEqual(len(kw), 1)
From 5f1667ace26ab511ca8e30438f72df7fcfa24ef6 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 5 Nov 2014 16:50:46 -0500
Subject: [PATCH 13/20] Fix up accessor / mutator methods to use new helpers.
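
With the helpers in place, simple accessors read through the lazily
loading 'properties' mapping and mutators funnel through
'_patch_properties'. The resulting pattern, sketched here with the
location pair from this change:

    def get_location(self):
        return self.properties.get('location')

    def set_location(self, location):
        self._patch_properties({'location': location})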
--- gcloud/storage/bucket.py | 33 +++++++++++---------------------- 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 638a84f9ce8c..400ad750b545 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -367,10 +367,8 @@ def get_cors(self): Keys include 'max_age', 'methods', 'origins', and 'headers'. """ - if not self.has_metadata('cors'): - self.reload_metadata() result = [] - for entry in self.metadata.get('cors', ()): + for entry in self.properties.get('cors', ()): entry = entry.copy() result.append(entry) if 'maxAgeSeconds' in entry: @@ -406,7 +404,7 @@ def update_cors(self, entries): entry['origin'] = entry.pop('origins') if 'headers' in entry: entry['responseHeader'] = entry.pop('headers') - self.patch_metadata({'cors': to_patch}) + self._patch_properties({'cors': to_patch}) def get_default_object_acl(self): """Get the current Default Object ACL rules. @@ -453,10 +451,8 @@ def get_lifecycle(self): :rtype: list(dict) :returns: A sequence of mappings describing each lifecycle rule. """ - if not self._has_property('lifecycle'): - self._reload_properties() result = [] - info = self._properties.get('lifecycle', {}) + info = self.properties.get('lifecycle', {}) for rule in info.get('rule', ()): rule = rule.copy() result.append(rule) @@ -482,9 +478,7 @@ def get_location(self): :rtype: string :returns: The configured location. """ - if not self.has_metadata('location'): - self.reload_metadata() - return self.metadata.get('location') + return self.properties.get('location') def set_location(self, location): """Update location configured for this bucket. @@ -495,7 +489,7 @@ def set_location(self, location): :type location: string :param location: The new configured location. """ - self.patch_metadata({'location': location}) + self._patch_properties({'location': location}) def get_logging(self): """Return info about access logging for this bucket. @@ -506,9 +500,7 @@ def get_logging(self): :returns: a dict w/ keys, ``bucket_name`` and ``object_prefix`` (if logging is enabled), or None (if not). """ - if not self.has_metadata('logging'): - self.reload_metadata() - info = self.metadata.get('logging') + info = self.properties.get('logging') if info is not None: info = info.copy() info['bucket_name'] = info.pop('logBucket') @@ -527,14 +519,14 @@ def enable_logging(self, bucket_name, object_prefix=''): :param object_prefix: prefix for access log filenames """ info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix} - self.patch_metadata({'logging': info}) + self._patch_properties({'logging': info}) def disable_logging(self): """Disable access logging for this bucket. See: https://cloud.google.com/storage/docs/accesslogs#disabling """ - self.patch_metadata({'logging': None}) + self._patch_properties({'logging': None}) @property def metageneration(self): @@ -616,9 +608,7 @@ def get_versioning(self): :rtype: boolean :returns: True if enabled, else False. """ - if not self.has_metadata(field='versioning'): - self.reload_metadata() - versioning = self.metadata.get('versioning', {}) + versioning = self.properties.get('versioning', {}) return versioning.get('enabled', False) def enable_versioning(self): @@ -627,7 +617,7 @@ def enable_versioning(self): See: https://cloud.google.com/storage/docs/object-versioning for details. """ - self.patch_metadata({'versioning': {'enabled': True}}) + self._patch_properties({'versioning': {'enabled': True}}) def disable_versioning(self): """Disable versioning for this bucket. 
@@ -635,8 +625,8 @@ def disable_versioning(self):
 
         See: https://cloud.google.com/storage/docs/object-versioning for
         details.
         """
-        self.patch_metadata({'versioning': {'enabled': False}})
+        self._patch_properties({'versioning': {'enabled': False}})
 
     def configure_website(self, main_page_suffix=None, not_found_page=None):
         """Configure website-related properties.

From 72f3e5aa66eb9d5d00e1b40b020a7f32a13b80b8 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 5 Nov 2014 16:51:13 -0500
Subject: [PATCH 14/20] Bucket has more fields than pylint wants to allow.

---
 pylintrc_default | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pylintrc_default b/pylintrc_default
index 84293654d3aa..89370c69e4b4 100644
--- a/pylintrc_default
+++ b/pylintrc_default
@@ -7,7 +7,7 @@ good-names = i, j, k, ex, Run, _, pb, id,
 
 [DESIGN]
 max-args = 10
-max-public-methods = 30
+max-public-methods = 40
 
 [FORMAT]
 # NOTE: By default pylint ignores whitespace checks around the

From f77e593e11695ab6acd03cb080f27a315de77892 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 6 Nov 2014 10:17:26 -0500
Subject: [PATCH 15/20] Remove backward-compatibility aliases.

---
 gcloud/storage/_helpers.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py
index 023655c3e6d9..343d3a270869 100644
--- a/gcloud/storage/_helpers.py
+++ b/gcloud/storage/_helpers.py
@@ -53,8 +53,6 @@ def properties(self):
             self._reload_properties()
         return self._properties.copy()
 
-    metadata = properties  # Backward-compatibiltiy alias
-
     def _reload_properties(self):
         """Reload properties from Cloud Storage.
 
@@ -67,7 +65,6 @@ def _reload_properties(self):
         self._properties = self.connection.api_request(
             method='GET', path=self.path, query_params=query_params)
         return self
-    reload_metadata = _reload_properties  # backward-compat alias
 
     def _patch_properties(self, properties):
         """Update particular fields of this object's properties.
@@ -90,7 +87,6 @@ def _patch_properties(self, properties):
             method='PATCH', path=self.path, data=properties,
             query_params={'projection': 'full'})
         return self
-    patch_metadata = _patch_properties  # backward-compat alias
 
     def _has_property(self, field=None):
         """Check if property is available.
@@ -105,7 +101,6 @@ def _has_property(self, field=None):
         if field and field not in self._properties:
             return False
         return len(self._properties) > 0
-    has_metadata = _has_property  # backward-compat alias
 
     def _get_property(self, field, default=None):
         """Return the value of a field from the server-side representation.
@@ -133,7 +128,6 @@ def _get_property(self, field, default=None):
             self._reload_properties()
 
         return self._properties.get(field, default)
-    get_metadata = _get_property  # Backward-compat alias
 
     def get_acl(self):
         """Get ACL as an object.

From b8ea27413db03fc1bf63690447b9f7f7fcba33f0 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 6 Nov 2014 10:18:43 -0500
Subject: [PATCH 16/20] Docstring fixes.

---
 gcloud/storage/_helpers.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py
index 343d3a270869..31fd0cbc9356 100644
--- a/gcloud/storage/_helpers.py
+++ b/gcloud/storage/_helpers.py
@@ -37,8 +37,8 @@ def __init__(self, name=None, properties=None):
         :type name: string
         :param name: The name of the object.
 
-        :type metadata: dict
-        :param metadata: All the other data provided by Cloud Storage.
+        :type properties: dict
+        :param properties: All the other data provided by Cloud Storage.
""" self.name = name self._properties = {} @@ -48,6 +48,8 @@ def __init__(self, name=None, properties=None): @property def properties(self): """Ensure properties are loaded, and return a copy. + + :rtype: dict """ if not self._properties: self._reload_properties() From 262b413b655836a807b0fcbf1d7de215cc707f0d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Nov 2014 10:23:33 -0500 Subject: [PATCH 17/20] Drop (unused-and-confusing) '_has_property' method. --- gcloud/storage/_helpers.py | 14 -------------- gcloud/storage/test__helpers.py | 16 ---------------- 2 files changed, 30 deletions(-) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index 31fd0cbc9356..ac6f45b90223 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -90,20 +90,6 @@ def _patch_properties(self, properties): query_params={'projection': 'full'}) return self - def _has_property(self, field=None): - """Check if property is available. - - :type field: string - :param field: (optional) the particular field to check for. - - :rtype: boolean - :returns: Whether property is available locally. If no ``field`` - passed, return whether *any* properties are available. - """ - if field and field not in self._properties: - return False - return len(self._properties) > 0 - def _get_property(self, field, default=None): """Return the value of a field from the server-side representation. diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index 100d48e4ecd2..db8a75944a2b 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -44,22 +44,6 @@ def test__reload_properties(self): self.assertEqual(kw[0]['path'], '/path') self.assertEqual(kw[0]['query_params'], {'projection': 'noAcl'}) - def test__has_property_not_loaded(self): - mixin = self._makeOne() - self.assertEqual(mixin._has_property('nonesuch'), False) - - def test__has_property_loaded_no_field(self): - mixin = self._makeOne(properties={'foo': 'Foo'}) - self.assertEqual(mixin._has_property(), True) - - def test__has_property_loaded_miss(self): - mixin = self._makeOne(properties={'foo': 'Foo'}) - self.assertEqual(mixin._has_property('nonesuch'), False) - - def test__has_property_loaded_hit(self): - mixin = self._makeOne(properties={'extant': False}) - self.assertEqual(mixin._has_property('extant'), True) - def test__get_property_eager_hit(self): derived = self._derivedClass()(properties={'foo': 'Foo'}) self.assertEqual(derived._get_property('foo'), 'Foo') From 25b7b5c1d72d6bc44fc1435586e15ce11ee8bdcd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Nov 2014 10:24:17 -0500 Subject: [PATCH 18/20] Use 'properties' to ensure loading before lookup. --- gcloud/storage/_helpers.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index ac6f45b90223..d4a4125b1bd3 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -112,10 +112,7 @@ def _get_property(self, field, default=None): message = "Use '%s' or related methods instead." % custom raise KeyError((field, message)) - if not self._properties or field not in self._properties: - self._reload_properties() - - return self._properties.get(field, default) + return self.properties.get(field, default) def get_acl(self): """Get ACL as an object. 
From 13b28c224491bf48f3a79b55ac3791c404026fe3 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 6 Nov 2014 10:41:53 -0500
Subject: [PATCH 19/20] Fix copy-paste errors in 'Bucket' accessor docstrings.

---
 gcloud/storage/bucket.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index 400ad750b545..b63e08c691dd 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -530,7 +530,7 @@ def disable_logging(self):
 
     @property
     def metageneration(self):
-        """Retrieve the ID for the bucket.
+        """Retrieve the metageneration for the bucket.
 
         See: https://cloud.google.com/storage/docs/json_api/v1/buckets
 
@@ -542,7 +542,7 @@ def metageneration(self):
 
     @property
     def owner(self):
-        """Retrieve the ID for the bucket.
+        """Retrieve info about the owner of the bucket.
 
         See: https://cloud.google.com/storage/docs/json_api/v1/buckets
 
@@ -555,7 +555,7 @@ def owner(self):
 
     @property
     def project_number(self):
-        """Retrieve the ID for the bucket.
+        """Retrieve the number of the project to which the bucket is assigned.
 
         See: https://cloud.google.com/storage/docs/json_api/v1/buckets
 
@@ -577,7 +577,7 @@ def self_link(self):
 
     @property
     def storage_class(self):
-        """Retrieve the ID for the bucket.
+        """Retrieve the storage class for the bucket.
 
         See: https://cloud.google.com/storage/docs/json_api/v1/buckets and
         https://cloud.google.com/storage/docs/durable-reduced-availability#_DRA_Bucket
 
@@ -590,7 +590,7 @@ def storage_class(self):
 
     @property
     def time_created(self):
-        """Retrieve the ID for the bucket.
+        """Retrieve the timestamp at which the bucket was created.
 
         See: https://cloud.google.com/storage/docs/json_api/v1/buckets
 

From 3eaf39dbe9873ddbebc16211915bfd410de95d07 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Thu, 6 Nov 2014 10:58:21 -0500
Subject: [PATCH 20/20] Avoid remapping names of keys in server-side properties.

Per @dhermes,
https://github.com/GoogleCloudPlatform/gcloud-python/pull/347#discussion_r19950864.

---
 gcloud/storage/bucket.py      | 53 ++++++-----------------------------
 gcloud/storage/test_bucket.py | 52 +++++++++++++++++-----------------
 2 files changed, 35 insertions(+), 70 deletions(-)

diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index b63e08c691dd..7d60a8c1cd4d 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -364,22 +364,8 @@ def get_cors(self):
 
         :rtype: list(dict)
         :returns: A sequence of mappings describing each CORS policy.
-                  Keys include 'max_age', 'methods', 'origins', and
-                  'headers'.
-        """
-        result = []
-        for entry in self.properties.get('cors', ()):
-            entry = entry.copy()
-            result.append(entry)
-            if 'maxAgeSeconds' in entry:
-                entry['max_age'] = entry.pop('maxAgeSeconds')
-            if 'method' in entry:
-                entry['methods'] = entry.pop('method')
-            if 'origin' in entry:
-                entry['origins'] = entry.pop('origin')
-            if 'responseHeader' in entry:
-                entry['headers'] = entry.pop('responseHeader')
-        return result
+        """
+        return [policy.copy() for policy in self.properties.get('cors', ())]
 
     def update_cors(self, entries):
         """Update CORS policies configured for this bucket.
@@ -389,22 +375,8 @@ def update_cors(self, entries):
 
         :type entries: list(dict)
         :param entries: A sequence of mappings describing each CORS policy.
-                        Keys include 'max_age', 'methods', 'origins', and
-                        'headers'.
- """ - to_patch = [] - for entry in entries: - entry = entry.copy() - to_patch.append(entry) - if 'max_age' in entry: - entry['maxAgeSeconds'] = entry.pop('max_age') - if 'methods' in entry: - entry['method'] = entry.pop('methods') - if 'origins' in entry: - entry['origin'] = entry.pop('origins') - if 'headers' in entry: - entry['responseHeader'] = entry.pop('headers') - self._patch_properties({'cors': to_patch}) + """ + self._patch_properties({'cors': entries}) def get_default_object_acl(self): """Get the current Default Object ACL rules. @@ -451,12 +423,8 @@ def get_lifecycle(self): :rtype: list(dict) :returns: A sequence of mappings describing each lifecycle rule. """ - result = [] info = self.properties.get('lifecycle', {}) - for rule in info.get('rule', ()): - rule = rule.copy() - result.append(rule) - return result + return [rule.copy() for rule in info.get('rule', ())] def update_lifecycle(self, rules): """Update CORS policies configured for this bucket. @@ -497,15 +465,12 @@ def get_logging(self): See: https://cloud.google.com/storage/docs/accesslogs#status :rtype: dict or None - :returns: a dict w/ keys, ``bucket_name`` and ``object_prefix`` + :returns: a dict w/ keys, ``logBucket`` and ``logObjectPrefix`` (if logging is enabled), or None (if not). """ info = self.properties.get('logging') if info is not None: - info = info.copy() - info['bucket_name'] = info.pop('logBucket') - info['object_prefix'] = info.pop('logObjectPrefix', '') - return info + return info.copy() def enable_logging(self, bucket_name, object_prefix=''): """Enable access logging for this bucket. @@ -549,9 +514,7 @@ def owner(self): :rtype: dict :returns: mapping of owner's role/ID. """ - owner = self.properties['owner'].copy() - owner['id'] = owner.pop('entityId') - return owner + return self.properties['owner'].copy() @property def project_number(self): diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index c2657cbb2c6b..dc46016d033b 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -419,10 +419,14 @@ def test_get_cors_eager(self): bucket = self._makeOne(connection, NAME, before) entries = bucket.get_cors() self.assertEqual(len(entries), 2) - self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) - self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) - self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) - self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + self.assertEqual(entries[0]['maxAgeSeconds'], + CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['method'], + CORS_ENTRY['method']) + self.assertEqual(entries[0]['origin'], + CORS_ENTRY['origin']) + self.assertEqual(entries[0]['responseHeader'], + CORS_ENTRY['responseHeader']) self.assertEqual(entries[1], {}) kw = connection._requested self.assertEqual(len(kw), 0) @@ -440,10 +444,14 @@ def test_get_cors_lazy(self): bucket = self._makeOne(connection, NAME) entries = bucket.get_cors() self.assertEqual(len(entries), 1) - self.assertEqual(entries[0]['max_age'], CORS_ENTRY['maxAgeSeconds']) - self.assertEqual(entries[0]['methods'], CORS_ENTRY['method']) - self.assertEqual(entries[0]['origins'], CORS_ENTRY['origin']) - self.assertEqual(entries[0]['headers'], CORS_ENTRY['responseHeader']) + self.assertEqual(entries[0]['maxAgeSeconds'], + CORS_ENTRY['maxAgeSeconds']) + self.assertEqual(entries[0]['method'], + CORS_ENTRY['method']) + self.assertEqual(entries[0]['origin'], + CORS_ENTRY['origin']) + self.assertEqual(entries[0]['responseHeader'], + 
+                         CORS_ENTRY['responseHeader'])
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'GET')
@@ -458,16 +466,10 @@ def test_update_cors(self):
             'origin': ['127.0.0.1'],
             'responseHeader': ['Content-Type'],
         }
-        MAPPED = {
-            'max_age': 1234,
-            'methods': ['OPTIONS', 'GET'],
-            'origins': ['127.0.0.1'],
-            'headers': ['Content-Type'],
-        }
         after = {'cors': [CORS_ENTRY, {}]}
         connection = _Connection(after)
         bucket = self._makeOne(connection, NAME)
-        bucket.update_cors([MAPPED, {}])
+        bucket.update_cors([CORS_ENTRY, {}])
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'PATCH')
@@ -475,7 +477,7 @@ def test_update_cors(self):
         self.assertEqual(kw[0]['data'], after)
         self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
         entries = bucket.get_cors()
-        self.assertEqual(entries, [MAPPED, {}])
+        self.assertEqual(entries, [CORS_ENTRY, {}])
 
     def test_get_default_object_acl_lazy(self):
         from gcloud.storage.acl import BucketACL
@@ -602,8 +604,8 @@ def test_get_logging_eager_w_prefix(self):
         connection = _Connection()
         bucket = self._makeOne(connection, NAME, before)
         info = bucket.get_logging()
-        self.assertEqual(info['bucket_name'], LOG_BUCKET)
-        self.assertEqual(info['object_prefix'], LOG_PREFIX)
+        self.assertEqual(info['logBucket'], LOG_BUCKET)
+        self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
         kw = connection._requested
         self.assertEqual(len(kw), 0)
 
@@ -614,8 +616,8 @@ def test_get_logging_lazy_wo_prefix(self):
         connection = _Connection(after)
         bucket = self._makeOne(connection, NAME)
         info = bucket.get_logging()
-        self.assertEqual(info['bucket_name'], LOG_BUCKET)
-        self.assertEqual(info['object_prefix'], '')
+        self.assertEqual(info['logBucket'], LOG_BUCKET)
+        self.assertEqual(info.get('logObjectPrefix'), None)
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'GET')
@@ -632,8 +634,8 @@ def test_enable_logging_defaults(self):
         self.assertTrue(bucket.get_logging() is None)
         bucket.enable_logging(LOG_BUCKET)
         info = bucket.get_logging()
-        self.assertEqual(info['bucket_name'], LOG_BUCKET)
-        self.assertEqual(info['object_prefix'], '')
+        self.assertEqual(info['logBucket'], LOG_BUCKET)
+        self.assertEqual(info['logObjectPrefix'], '')
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'PATCH')
@@ -653,8 +655,8 @@ def test_enable_logging_explicit(self):
         self.assertTrue(bucket.get_logging() is None)
         bucket.enable_logging(LOG_BUCKET, LOG_PFX)
         info = bucket.get_logging()
-        self.assertEqual(info['bucket_name'], LOG_BUCKET)
-        self.assertEqual(info['object_prefix'], LOG_PFX)
+        self.assertEqual(info['logBucket'], LOG_BUCKET)
+        self.assertEqual(info['logObjectPrefix'], LOG_PFX)
         kw = connection._requested
         self.assertEqual(len(kw), 1)
         self.assertEqual(kw[0]['method'], 'PATCH')
@@ -690,7 +692,7 @@ def test_owner(self):
         bucket = self._makeOne(properties=properties)
         owner = bucket.owner
         self.assertEqual(owner['entity'], 'project-owner-12345')
-        self.assertEqual(owner['id'], '23456')
+        self.assertEqual(owner['entityId'], '23456')
 
     def test_project_number(self):
         PROJECT_NUMBER = 12345
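
Taken together, PATCH 20/20 means both accessors and mutators now traffic
in the JSON API's own key names ('maxAgeSeconds', 'logBucket', 'entityId',
and friends) rather than remapped snake_case variants.  A minimal sketch
of the resulting round trip, using plain dicts to stand in for the
bucket's property cache (nothing below is part of the patch itself):

    CORS_ENTRY = {
        'maxAgeSeconds': 1234,
        'method': ['OPTIONS', 'GET'],
        'origin': ['127.0.0.1'],
        'responseHeader': ['Content-Type'],
    }

    properties = {
        'cors': [CORS_ENTRY],
        'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'},
    }

    # get_cors(): copies of the stored entries, keys untouched.
    entries = [policy.copy() for policy in properties.get('cors', ())]
    assert entries[0]['maxAgeSeconds'] == 1234   # no more 'max_age'

    # get_logging(): the raw mapping (copied), or None if unset.
    info = properties.get('logging')
    if info is not None:
        info = info.copy()
    assert info['logBucket'] == 'logs'           # no more 'bucket_name'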