From f8e40d757eddb081cc453d4c5cedcdc3e4d9cac9 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 19 May 2015 14:35:34 -0400
Subject: [PATCH] Move threadlocal stacks of batches to relevant connection
 module.

---
 gcloud/datastore/api.py         |  9 +++++----
 gcloud/datastore/batch.py       | 11 ++++-------
 gcloud/datastore/connection.py  |  4 ++++
 gcloud/datastore/test_api.py    | 16 ++++++++--------
 gcloud/datastore/test_batch.py  | 28 ++++++++++++++--------------
 gcloud/storage/_helpers.py      |  5 ++---
 gcloud/storage/batch.py         | 11 ++++-------
 gcloud/storage/connection.py    |  4 ++++
 gcloud/storage/test__helpers.py |  8 ++++----
 gcloud/storage/test_batch.py    | 30 ++++++++++++++++++++++--------
 10 files changed, 71 insertions(+), 55 deletions(-)

diff --git a/gcloud/datastore/api.py b/gcloud/datastore/api.py
index 246641af5a18..88d98186874e 100644
--- a/gcloud/datastore/api.py
+++ b/gcloud/datastore/api.py
@@ -20,6 +20,7 @@
 
 from gcloud.datastore import _implicit_environ
 from gcloud.datastore.batch import Batch
+from gcloud.datastore.connection import _CONNECTIONS
 from gcloud.datastore.entity import Entity
 from gcloud.datastore.transaction import Transaction
 from gcloud.datastore import helpers
@@ -53,7 +54,7 @@ def _require_dataset_id(dataset_id=None, first_key=None):
     """
     if dataset_id is not None:
         return dataset_id
-    top = Batch.current()
+    top = _CONNECTIONS.top
     if top is not None:
         return top.dataset_id
     if first_key is not None:
@@ -77,7 +78,7 @@ def _require_connection(connection=None):
         cannot be inferred from the environment.
     """
     if connection is None:
-        top = Batch.current()
+        top = _CONNECTIONS.top
         if top is not None:
             connection = top.connection
         else:
@@ -262,7 +263,7 @@ def put(entities, connection=None, dataset_id=None):
 
     connection = _require_connection(connection)
     dataset_id = _require_dataset_id(dataset_id, entities[0].key)
-    current = Batch.current()
+    current = _CONNECTIONS.top
     in_batch = current is not None
     if not in_batch:
         current = Batch(dataset_id=dataset_id, connection=connection)
@@ -298,7 +299,7 @@ def delete(keys, connection=None, dataset_id=None):
     dataset_id = _require_dataset_id(dataset_id, keys[0])
 
     # We allow partial keys to attempt a delete, the backend will fail.
-    current = Batch.current()
+    current = _CONNECTIONS.top
     in_batch = current is not None
     if not in_batch:
         current = Batch(dataset_id=dataset_id, connection=connection)
diff --git a/gcloud/datastore/batch.py b/gcloud/datastore/batch.py
index 0b1185322542..1e437078720e 100644
--- a/gcloud/datastore/batch.py
+++ b/gcloud/datastore/batch.py
@@ -21,16 +21,13 @@
 https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations
 """
 
-from gcloud._helpers import _LocalStack
 from gcloud.datastore import _implicit_environ
 from gcloud.datastore import helpers
+from gcloud.datastore.connection import _CONNECTIONS
 from gcloud.datastore.key import _dataset_ids_equal
 from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
 
 
-_BATCHES = _LocalStack()
-
-
 class Batch(object):
     """An abstraction representing a collected group of updates / deletes.
 
@@ -90,7 +87,7 @@ def __init__(self, dataset_id=None, connection=None):
     @staticmethod
     def current():
         """Return the topmost batch / transaction, or None."""
-        return _BATCHES.top
+        return _CONNECTIONS.top
 
     @property
     def dataset_id(self):
@@ -229,7 +226,7 @@ def rollback(self):
         pass
 
     def __enter__(self):
-        _BATCHES.push(self)
+        _CONNECTIONS.push(self)
         self.begin()
         return self
 
@@ -240,7 +237,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             self.rollback()
         finally:
-            _BATCHES.pop()
+            _CONNECTIONS.pop()
 
 
 def _assign_entity_to_mutation(mutation_pb, entity, auto_id_entities):
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index 9a9f9ccc583f..47cadfeb6bf6 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -17,10 +17,14 @@
 import os
 
 from gcloud import connection
+from gcloud._helpers import _LocalStack
 from gcloud.exceptions import make_exception
 from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
 
 
+_CONNECTIONS = _LocalStack()
+
+
 SCOPE = ('https://www.googleapis.com/auth/datastore',
          'https://www.googleapis.com/auth/userinfo.email')
 """The scopes required for authenticating as a Cloud Datastore consumer."""
diff --git a/gcloud/datastore/test_api.py b/gcloud/datastore/test_api.py
index 85cee99ee0d3..dea14cb3f08a 100644
--- a/gcloud/datastore/test_api.py
+++ b/gcloud/datastore/test_api.py
@@ -925,13 +925,13 @@ def __init__(self, dataset_id, connection):
         self._batch = Batch(dataset_id, connection)
 
     def __enter__(self):
-        from gcloud.datastore.batch import _BATCHES
-        _BATCHES.push(self._batch)
+        from gcloud.datastore.connection import _CONNECTIONS
+        _CONNECTIONS.push(self._batch)
         return self._batch
 
     def __exit__(self, *args):
-        from gcloud.datastore.batch import _BATCHES
-        _BATCHES.pop()
+        from gcloud.datastore.connection import _CONNECTIONS
+        _CONNECTIONS.pop()
 
 
 class _NoCommitTransaction(object):
@@ -942,13 +942,13 @@ def __init__(self, dataset_id, connection, transaction_id='TRANSACTION'):
         xact._id = transaction_id
 
     def __enter__(self):
-        from gcloud.datastore.batch import _BATCHES
-        _BATCHES.push(self._transaction)
+        from gcloud.datastore.connection import _CONNECTIONS
+        _CONNECTIONS.push(self._transaction)
         return self._transaction
 
     def __exit__(self, *args):
-        from gcloud.datastore.batch import _BATCHES
-        _BATCHES.pop()
+        from gcloud.datastore.connection import _CONNECTIONS
+        _CONNECTIONS.pop()
 
 
 class _HttpMultiple(object):
diff --git a/gcloud/datastore/test_batch.py b/gcloud/datastore/test_batch.py
index de86cc23d2ac..ada2228d3ddd 100644
--- a/gcloud/datastore/test_batch.py
+++ b/gcloud/datastore/test_batch.py
@@ -279,21 +279,21 @@ def test_commit_w_auto_id_entities(self):
         self.assertEqual(key._id, _NEW_ID)
 
     def test_as_context_mgr_wo_error(self):
-        from gcloud.datastore.batch import _BATCHES
+        from gcloud.datastore.connection import _CONNECTIONS
         _DATASET = 'DATASET'
         _PROPERTIES = {'foo': 'bar'}
         connection = _Connection()
         entity = _Entity(_PROPERTIES)
         key = entity.key = _Key(_DATASET)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         with self._makeOne(dataset_id=_DATASET,
                            connection=connection) as batch:
-            self.assertEqual(list(_BATCHES), [batch])
+            self.assertEqual(list(_CONNECTIONS), [batch])
             batch.put(entity)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         insert_auto_ids = list(batch.mutation.insert_auto_id)
         self.assertEqual(len(insert_auto_ids), 0)
@@ -305,7 +305,7 @@ def test_as_context_mgr_wo_error(self):
         self.assertEqual(connection._committed,
                          [(_DATASET, batch.mutation)])
 
     def test_as_context_mgr_nested(self):
-        from gcloud.datastore.batch import _BATCHES
+        from gcloud.datastore.connection import _CONNECTIONS
         _DATASET = 'DATASET'
         _PROPERTIES = {'foo': 'bar'}
         connection = _Connection()
@@ -314,20 +314,20 @@ def test_as_context_mgr_nested(self):
         entity2 = _Entity(_PROPERTIES)
         key2 = entity2.key = _Key(_DATASET)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         with self._makeOne(dataset_id=_DATASET,
                            connection=connection) as batch1:
-            self.assertEqual(list(_BATCHES), [batch1])
+            self.assertEqual(list(_CONNECTIONS), [batch1])
             batch1.put(entity1)
             with self._makeOne(dataset_id=_DATASET,
                                connection=connection) as batch2:
-                self.assertEqual(list(_BATCHES), [batch2, batch1])
+                self.assertEqual(list(_CONNECTIONS), [batch2, batch1])
                 batch2.put(entity2)
 
-            self.assertEqual(list(_BATCHES), [batch1])
+            self.assertEqual(list(_CONNECTIONS), [batch1])
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         insert_auto_ids = list(batch1.mutation.insert_auto_id)
         self.assertEqual(len(insert_auto_ids), 0)
@@ -350,25 +350,25 @@ def test_as_context_mgr_nested(self):
                           (_DATASET, batch1.mutation)])
 
     def test_as_context_mgr_w_error(self):
-        from gcloud.datastore.batch import _BATCHES
+        from gcloud.datastore.connection import _CONNECTIONS
         _DATASET = 'DATASET'
         _PROPERTIES = {'foo': 'bar'}
         connection = _Connection()
         entity = _Entity(_PROPERTIES)
         key = entity.key = _Key(_DATASET)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         try:
             with self._makeOne(dataset_id=_DATASET,
                                connection=connection) as batch:
-                self.assertEqual(list(_BATCHES), [batch])
+                self.assertEqual(list(_CONNECTIONS), [batch])
                 batch.put(entity)
                 raise ValueError("testing")
         except ValueError:
             pass
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         insert_auto_ids = list(batch.mutation.insert_auto_id)
         self.assertEqual(len(insert_auto_ids), 0)
diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py
index a5acb45c645e..872040621cc9 100644
--- a/gcloud/storage/_helpers.py
+++ b/gcloud/storage/_helpers.py
@@ -21,7 +21,7 @@
 import base64
 
 from gcloud.storage._implicit_environ import get_default_connection
-from gcloud.storage.batch import Batch
+from gcloud.storage.connection import _CONNECTIONS
 
 
 class _PropertyMixin(object):
@@ -124,9 +124,8 @@ def _require_connection(connection=None):
     :raises: :class:`EnvironmentError` if ``connection`` is ``None``, and
         cannot be inferred from the environment.
     """
-    # NOTE: We use current Batch directly since it inherits from Connection.
     if connection is None:
-        connection = Batch.current()
+        connection = _CONNECTIONS.top
 
     if connection is None:
         connection = get_default_connection()
diff --git a/gcloud/storage/batch.py b/gcloud/storage/batch.py
index 151e99a18888..5142877f6f95 100644
--- a/gcloud/storage/batch.py
+++ b/gcloud/storage/batch.py
@@ -26,13 +26,10 @@
 
 import six
 
-from gcloud._helpers import _LocalStack
 from gcloud.exceptions import make_exception
 from gcloud.storage import _implicit_environ
 from gcloud.storage.connection import Connection
-
-
-_BATCHES = _LocalStack()
+from gcloud.storage.connection import _CONNECTIONS
 
 
 class MIMEApplicationHTTP(MIMEApplication):
@@ -245,10 +242,10 @@ def finish(self):
     @staticmethod
     def current():
         """Return the topmost batch, or None."""
-        return _BATCHES.top
+        return _CONNECTIONS.top
 
     def __enter__(self):
-        _BATCHES.push(self)
+        _CONNECTIONS.push(self)
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
@@ -256,7 +253,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
             if exc_type is None:
                 self.finish()
         finally:
-            _BATCHES.pop()
+            _CONNECTIONS.pop()
 
 
 def _generate_faux_mime_message(parser, response, content):
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
index 2740a47063db..24754822b273 100644
--- a/gcloud/storage/connection.py
+++ b/gcloud/storage/connection.py
@@ -15,6 +15,10 @@
 """Create / interact with gcloud storage connections."""
 
 from gcloud import connection as base_connection
+from gcloud._helpers import _LocalStack
+
+
+_CONNECTIONS = _LocalStack()
 
 
 SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py
index 104ecb38bd05..9157df429676 100644
--- a/gcloud/storage/test__helpers.py
+++ b/gcloud/storage/test__helpers.py
@@ -294,10 +294,10 @@ def __init__(self, connection):
         self._connection = connection
 
     def __enter__(self):
-        from gcloud.storage.batch import _BATCHES
-        _BATCHES.push(self._connection)
+        from gcloud.storage.connection import _CONNECTIONS
+        _CONNECTIONS.push(self._connection)
         return self._connection
 
     def __exit__(self, *args):
-        from gcloud.storage.batch import _BATCHES
-        _BATCHES.pop()
+        from gcloud.storage.connection import _CONNECTIONS
+        _CONNECTIONS.pop()
diff --git a/gcloud/storage/test_batch.py b/gcloud/storage/test_batch.py
index 94695671090e..b6b8e50d5101 100644
--- a/gcloud/storage/test_batch.py
+++ b/gcloud/storage/test_batch.py
@@ -222,6 +222,20 @@ def test_finish_empty(self):
         self.assertRaises(ValueError, batch.finish)
         self.assertTrue(connection.http is http)
 
+    def test_current(self):
+        from gcloud.storage.connection import _CONNECTIONS
+        klass = self._getTargetClass()
+        http = _HTTP()  # no requests expected
+        connection = _Connection(http=http)
+        self.assertTrue(klass.current() is None)
+        batch = self._makeOne(connection)
+        _CONNECTIONS.push(batch)
+        try:
+            self.assertTrue(klass.current() is batch)
+        finally:
+            _CONNECTIONS.pop()
+        self.assertTrue(klass.current() is None)
+
     def _check_subrequest_no_payload(self, chunk, method, url):
         lines = chunk.splitlines()
         # blank + 2 headers + blank + request + blank + blank
@@ -370,27 +384,27 @@ def test_finish_nonempty_non_multipart_response(self):
         self.assertRaises(ValueError, batch.finish)
 
     def test_as_context_mgr_wo_error(self):
-        from gcloud.storage.batch import _BATCHES
+        from gcloud.storage.connection import _CONNECTIONS
         URL = 'http://example.com/api'
         expected = _Response()
         expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
         http = _HTTP((expected,
                       _THREE_PART_MIME_RESPONSE))
         connection = _Connection(http=http)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         target1 = _MockObject()
         target2 = _MockObject()
         target3 = _MockObject()
         with self._makeOne(connection) as batch:
-            self.assertEqual(list(_BATCHES), [batch])
+            self.assertEqual(list(_CONNECTIONS), [batch])
             batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                                 target_object=target1)
             batch._make_request('PATCH', URL, {'bar': 3},
                                 target_object=target2)
             batch._make_request('DELETE', URL, target_object=target3)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
         self.assertEqual(len(batch._requests), 3)
         self.assertEqual(batch._requests[0][0], 'POST')
         self.assertEqual(batch._requests[1][0], 'PATCH')
@@ -404,19 +418,19 @@ def test_as_context_mgr_wo_error(self):
 
     def test_as_context_mgr_w_error(self):
         from gcloud.storage.batch import _FutureDict
-        from gcloud.storage.batch import _BATCHES
+        from gcloud.storage.connection import _CONNECTIONS
         URL = 'http://example.com/api'
         http = _HTTP()
         connection = _Connection(http=http)
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
 
         target1 = _MockObject()
         target2 = _MockObject()
         target3 = _MockObject()
         try:
             with self._makeOne(connection) as batch:
-                self.assertEqual(list(_BATCHES), [batch])
+                self.assertEqual(list(_CONNECTIONS), [batch])
                 batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
                                     target_object=target1)
                 batch._make_request('PATCH', URL, {'bar': 3},
@@ -426,7 +440,7 @@ def test_as_context_mgr_w_error(self):
         except ValueError:
             pass
 
-        self.assertEqual(list(_BATCHES), [])
+        self.assertEqual(list(_CONNECTIONS), [])
         self.assertEqual(len(http._requests), 0)
         self.assertEqual(len(batch._requests), 3)
         self.assertEqual(batch._target_objects, [target1, target2, target3])
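
Note: both _CONNECTIONS globals introduced above are instances of gcloud._helpers._LocalStack. As a rough illustration only, the class below is a hypothetical stand-in (not the library's actual code) showing the minimal thread-local stack behavior this patch relies on: push, pop, a top property, and iteration from newest to oldest (the tests assert list(_CONNECTIONS) == [batch2, batch1]).

import threading


class LocalStackSketch(threading.local):
    """Hypothetical stand-in for gcloud._helpers._LocalStack (illustration only).

    Subclassing threading.local gives each thread its own _stack, so a
    batch pushed in one thread never becomes "current" in another.
    """

    def __init__(self):
        super(LocalStackSketch, self).__init__()
        self._stack = []

    def __iter__(self):
        # Newest first, matching the [batch2, batch1] ordering in the tests.
        return iter(reversed(self._stack))

    def push(self, resource):
        self._stack.append(resource)

    def pop(self):
        return self._stack.pop()

    @property
    def top(self):
        if self._stack:
            return self._stack[-1]
        return None


# Usage matching the push/pop/top pattern in the diff:
stack = LocalStackSketch()
stack.push('batch1')
stack.push('batch2')
assert list(stack) == ['batch2', 'batch1']
assert stack.top == 'batch2'
stack.pop()
assert stack.top == 'batch1'
stack.pop()
assert stack.top is None

With a stack like this, Batch.current() reduces to _CONNECTIONS.top, and the __enter__/__exit__ pairs in the diff keep the stack balanced even when the managed block raises.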