Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Storage: Add 'Bucket.location_type' property. #8570

Merged
merged 2 commits into from
Jul 12, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
122 changes: 111 additions & 11 deletions storage/google/cloud/storage/bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,21 +386,79 @@ class Bucket(_PropertyMixin):
This is used in Bucket.delete() and Bucket.make_public().
"""

STANDARD_STORAGE_CLASS = "STANDARD"
"""Storage class for objects accessed more than once per month."""

NEARLINE_STORAGE_CLASS = "NEARLINE"
"""Storage class for objects accessed at most once per month."""

COLDLINE_STORAGE_CLASS = "COLDLINE"
"""Storage class for objects accessed at most once per year."""

MULTI_REGIONAL_LEGACY_STORAGE_CLASS = "MULTI_REGIONAL"
"""Legacy storage class.

Alias for :attr:`STANDARD_STORAGE_CLASS`.

Implies :attr:`MULTI_REGION_LOCATION_TYPE` for :attr:`location_type`.
"""

REGIONAL_LEGACY_STORAGE_CLASS = "REGIONAL"
"""Legacy storage class.

Alias for :attr:`STANDARD_STORAGE_CLASS`.

Implies :attr:`REGION_LOCATION_TYPE` for :attr:`location_type`.
"""

DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS = "DURABLE_REDUCED_AVAILABILITY"
"""Legacy storage class.

Similar to :attr:`NEARLINE_STORAGE_CLASS`.
"""

# NOTE: the pre-change string literals and the post-change constant names were
# both present here (diff-merge artifact); only the constant names are kept so
# the tuple has no duplicate members.
_STORAGE_CLASSES = (
    STANDARD_STORAGE_CLASS,
    NEARLINE_STORAGE_CLASS,
    COLDLINE_STORAGE_CLASS,
    MULTI_REGIONAL_LEGACY_STORAGE_CLASS,  # deprecated
    REGIONAL_LEGACY_STORAGE_CLASS,  # deprecated
    DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,  # deprecated
)
"""Allowed values for :attr:`storage_class`.

Default value is :attr:`STANDARD_STORAGE_CLASS`.

See
https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
https://cloud.google.com/storage/docs/storage-classes
"""

MULTI_REGION_LOCATION_TYPE = "multi-region"
"""Location type: data will be replicated across regions in a multi-region.

Provides highest availability across largest area.
"""

REGION_LOCATION_TYPE = "region"
"""Location type: data will be stored within a single region.

Provides lowest latency within a single region.
"""

DUAL_REGION_LOCATION_TYPE = "dual-region"
"""Location type: data will be stored within two primary regions.

Provides high availability and low latency across two regions.
"""

_LOCATION_TYPES = (
    MULTI_REGION_LOCATION_TYPE,
    REGION_LOCATION_TYPE,
    DUAL_REGION_LOCATION_TYPE,
)
"""Allowed values for :attr:`location_type`."""

def __init__(self, client, name=None, user_project=None):
name = _validate_name(name)
super(Bucket, self).__init__(name=name)
Expand Down Expand Up @@ -1377,6 +1435,38 @@ def location(self, value):
warnings.warn(_LOCATION_SETTER_MESSAGE, DeprecationWarning, stacklevel=2)
self._location = value

@property
def location_type(self):
    """Retrieve or set the location type for the bucket.

    See https://cloud.google.com/storage/docs/storage-classes

    :setter: Set the location type for this bucket.
    :getter: Gets the location type for this bucket.

    :rtype: str or ``NoneType``
    :returns:
        If set, one of :attr:`MULTI_REGION_LOCATION_TYPE`,
        :attr:`REGION_LOCATION_TYPE`, or :attr:`DUAL_REGION_LOCATION_TYPE`,
        else ``None``.
    """
    # Absent key maps to None rather than raising.
    return self._properties.get("locationType")

@location_type.setter
def location_type(self, value):
    """Set the location type for the bucket.

    See https://cloud.google.com/storage/docs/storage-classes

    :type value: str
    :param value:
        One of :attr:`MULTI_REGION_LOCATION_TYPE`,
        :attr:`REGION_LOCATION_TYPE`, or :attr:`DUAL_REGION_LOCATION_TYPE`.

    :raises ValueError: if ``value`` is not a known location type.
    """
    # Validate before patching so an invalid value records no pending change.
    if value not in self._LOCATION_TYPES:
        raise ValueError("Invalid location type: %s" % (value,))
    self._patch_property("locationType", value)

def get_logging(self):
"""Return info about access logging for this bucket.

Expand Down Expand Up @@ -1533,9 +1623,14 @@ def storage_class(self):
:getter: Gets the storage class for this bucket.

:rtype: str or ``NoneType``
:returns:
    If set, one of :attr:`NEARLINE_STORAGE_CLASS`,
    :attr:`COLDLINE_STORAGE_CLASS`, :attr:`STANDARD_STORAGE_CLASS`,
    :attr:`MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
    :attr:`REGIONAL_LEGACY_STORAGE_CLASS`, or
    :attr:`DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`,
    else ``None``.
"""
return self._properties.get("storageClass")

Expand All @@ -1546,8 +1641,13 @@ def storage_class(self, value):
See https://cloud.google.com/storage/docs/storage-classes

:type value: str
:param value:
    One of :attr:`NEARLINE_STORAGE_CLASS`,
    :attr:`COLDLINE_STORAGE_CLASS`, :attr:`STANDARD_STORAGE_CLASS`,
    :attr:`MULTI_REGIONAL_LEGACY_STORAGE_CLASS`,
    :attr:`REGIONAL_LEGACY_STORAGE_CLASS`, or
    :attr:`DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`
"""
if value not in self._STORAGE_CLASSES:
raise ValueError("Invalid storage class: %s" % (value,))
Expand Down
93 changes: 77 additions & 16 deletions storage/tests/unit/test_bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,16 +299,23 @@ def _make_one(self, client=None, name=None, properties=None, user_project=None):
bucket._properties = properties or {}
return bucket

def test_ctor_w_invalid_name(self):
    """Constructing a bucket with an illegal name raises ValueError."""
    with self.assertRaises(ValueError):
        self._make_one(name="#invalid")

def test_ctor(self):
    """A new bucket exposes its name/properties and starts with empty state."""
    properties = {"key": "value"}
    bucket = self._make_one(name="name", properties=properties)

    self.assertEqual(bucket.name, "name")
    self.assertEqual(bucket._properties, properties)
    self.assertEqual(list(bucket._changes), [])
    self.assertFalse(bucket._acl.loaded)
    self.assertIs(bucket._acl.bucket, bucket)
    self.assertFalse(bucket._default_object_acl.loaded)
    self.assertIs(bucket._default_object_acl.bucket, bucket)
    self.assertEqual(list(bucket._label_removals), [])
    self.assertIsNone(bucket.user_project)

def test_ctor_w_user_project(self):
Expand All @@ -319,11 +326,13 @@ def test_ctor_w_user_project(self):
bucket = self._make_one(client, name=NAME, user_project=USER_PROJECT)
self.assertEqual(bucket.name, NAME)
self.assertEqual(bucket._properties, {})
self.assertEqual(bucket.user_project, USER_PROJECT)
self.assertEqual(list(bucket._changes), [])
self.assertFalse(bucket._acl.loaded)
self.assertIs(bucket._acl.bucket, bucket)
self.assertFalse(bucket._default_object_acl.loaded)
self.assertIs(bucket._default_object_acl.bucket, bucket)
self.assertEqual(list(bucket._label_removals), [])
self.assertEqual(bucket.user_project, USER_PROJECT)

def test_blob_wo_keys(self):
from google.cloud.storage.blob import Blob
Expand Down Expand Up @@ -1496,6 +1505,47 @@ def test_labels_setter_with_removal(self):
_, _, kwargs = client._connection.api_request.mock_calls[0]
self.assertNotIn("labels", kwargs["data"])

def test_location_type_getter_unset(self):
    """An absent 'locationType' resource property maps to ``None``."""
    self.assertIsNone(self._make_one().location_type)

def test_location_type_getter_set(self):
    """A stored 'locationType' resource property is surfaced verbatim."""
    klass = self._get_target_class()
    resource = {"locationType": klass.REGION_LOCATION_TYPE}
    bucket = self._make_one(properties=resource)
    self.assertEqual(bucket.location_type, klass.REGION_LOCATION_TYPE)

def test_location_type_setter_invalid(self):
    """Assigning an unknown location type raises and records no change."""
    bucket = self._make_one(name="name")
    with self.assertRaises(ValueError):
        bucket.location_type = "bogus"
    self.assertNotIn("locationType", bucket._changes)

def test_location_type_setter_MULTI_REGION(self):
    """Setting the multi-region location type sticks and is tracked."""
    klass = self._get_target_class()
    bucket = self._make_one(name="name")

    bucket.location_type = klass.MULTI_REGION_LOCATION_TYPE

    self.assertEqual(bucket.location_type, klass.MULTI_REGION_LOCATION_TYPE)
    self.assertIn("locationType", bucket._changes)

def test_location_type_setter_REGION(self):
    """Setting the region location type sticks and is tracked."""
    klass = self._get_target_class()
    bucket = self._make_one(name="name")

    bucket.location_type = klass.REGION_LOCATION_TYPE

    self.assertEqual(bucket.location_type, klass.REGION_LOCATION_TYPE)
    self.assertIn("locationType", bucket._changes)

def test_location_type_setter_DUAL_REGION(self):
    """Setting the dual-region location type sticks and is tracked."""
    klass = self._get_target_class()
    bucket = self._make_one(name="name")

    bucket.location_type = klass.DUAL_REGION_LOCATION_TYPE

    self.assertEqual(bucket.location_type, klass.DUAL_REGION_LOCATION_TYPE)
    self.assertIn("locationType", bucket._changes)

def test_get_logging_w_prefix(self):
NAME = "name"
LOG_BUCKET = "logs"
Expand Down Expand Up @@ -1658,10 +1708,10 @@ def test_self_link(self):
self.assertEqual(bucket.self_link, SELF_LINK)

def test_storage_class_getter(self):
    """A stored 'storageClass' resource property is surfaced verbatim."""
    # The stale pre-change lines (a bogus URL used as a storage class)
    # were diff-merge residue; only the updated version is kept.
    klass = self._get_target_class()
    properties = {"storageClass": klass.NEARLINE_STORAGE_CLASS}
    bucket = self._make_one(properties=properties)
    self.assertEqual(bucket.storage_class, klass.NEARLINE_STORAGE_CLASS)

def test_storage_class_setter_invalid(self):
NAME = "name"
Expand All @@ -1671,45 +1721,56 @@ def test_storage_class_setter_invalid(self):
self.assertFalse("storageClass" in bucket._changes)

def test_storage_class_setter_STANDARD(self):
    """STANDARD storage class is accepted and recorded as a change."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.STANDARD_STORAGE_CLASS
    self.assertEqual(bucket.storage_class, klass.STANDARD_STORAGE_CLASS)
    self.assertTrue("storageClass" in bucket._changes)

def test_storage_class_setter_NEARLINE(self):
    """NEARLINE storage class is accepted and recorded as a change."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.NEARLINE_STORAGE_CLASS
    self.assertEqual(bucket.storage_class, klass.NEARLINE_STORAGE_CLASS)
    self.assertTrue("storageClass" in bucket._changes)

def test_storage_class_setter_COLDLINE(self):
    """COLDLINE storage class is accepted and recorded as a change."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.COLDLINE_STORAGE_CLASS
    self.assertEqual(bucket.storage_class, klass.COLDLINE_STORAGE_CLASS)
    self.assertTrue("storageClass" in bucket._changes)

def test_storage_class_setter_MULTI_REGIONAL(self):
    """Legacy MULTI_REGIONAL class is accepted and recorded as a change."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.MULTI_REGIONAL_LEGACY_STORAGE_CLASS
    self.assertEqual(
        bucket.storage_class, klass.MULTI_REGIONAL_LEGACY_STORAGE_CLASS
    )
    self.assertTrue("storageClass" in bucket._changes)

def test_storage_class_setter_REGIONAL(self):
    """Legacy REGIONAL class is accepted and recorded as a change."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.REGIONAL_LEGACY_STORAGE_CLASS
    self.assertEqual(bucket.storage_class, klass.REGIONAL_LEGACY_STORAGE_CLASS)
    self.assertTrue("storageClass" in bucket._changes)

def test_storage_class_setter_DURABLE_REDUCED_AVAILABILITY(self):
    """Legacy DURABLE_REDUCED_AVAILABILITY class is accepted and recorded."""
    # Duplicate old-diff-side set/assert lines removed (diff-merge residue).
    klass = self._get_target_class()
    NAME = "name"
    bucket = self._make_one(name=NAME)
    bucket.storage_class = klass.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS
    self.assertEqual(
        bucket.storage_class,
        klass.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS,
    )
    self.assertTrue("storageClass" in bucket._changes)

def test_time_created(self):
Expand Down