Move threadlocal stacks of batches to relevant connection module.
tseaver committed May 27, 2015
1 parent 6cf0852 commit f8e40d7
Showing 10 changed files with 71 additions and 55 deletions.
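
Note: the _LocalStack helper that backs these per-thread stacks lives in gcloud._helpers and is not part of this diff. The sketch below is reconstructed from how the stacks are used here (push / pop / top, and LIFO iteration in the tests); the actual implementation may differ in detail.

import threading

class _LocalStack(threading.local):
    """Manage a thread-local LIFO stack of resources (batches / transactions)."""

    def __init__(self):
        super(_LocalStack, self).__init__()
        self._stack = []

    def __iter__(self):
        # The tests below expect list(stack) to yield newest-first (LIFO) order.
        return iter(reversed(self._stack))

    def push(self, resource):
        self._stack.append(resource)

    def pop(self):
        return self._stack.pop()

    @property
    def top(self):
        # Topmost resource, or None when the stack is empty.
        if self._stack:
            return self._stack[-1]
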
9 changes: 5 additions & 4 deletions gcloud/datastore/api.py
@@ -20,6 +20,7 @@

from gcloud.datastore import _implicit_environ
from gcloud.datastore.batch import Batch
from gcloud.datastore.connection import _CONNECTIONS
from gcloud.datastore.entity import Entity
from gcloud.datastore.transaction import Transaction
from gcloud.datastore import helpers
@@ -53,7 +54,7 @@ def _require_dataset_id(dataset_id=None, first_key=None):
"""
if dataset_id is not None:
return dataset_id
top = Batch.current()
top = _CONNECTIONS.top
if top is not None:
return top.dataset_id
if first_key is not None:
@@ -77,7 +78,7 @@ def _require_connection(connection=None):
cannot be inferred from the environment.
"""
if connection is None:
top = Batch.current()
top = _CONNECTIONS.top
if top is not None:
connection = top.connection
else:
@@ -262,7 +263,7 @@ def put(entities, connection=None, dataset_id=None):
connection = _require_connection(connection)
dataset_id = _require_dataset_id(dataset_id, entities[0].key)

current = Batch.current()
current = _CONNECTIONS.top
in_batch = current is not None
if not in_batch:
current = Batch(dataset_id=dataset_id, connection=connection)
@@ -298,7 +299,7 @@ def delete(keys, connection=None, dataset_id=None):
dataset_id = _require_dataset_id(dataset_id, keys[0])

# We allow partial keys to attempt a delete, the backend will fail.
current = Batch.current()
current = _CONNECTIONS.top
in_batch = current is not None
if not in_batch:
current = Batch(dataset_id=dataset_id, connection=connection)
11 changes: 4 additions & 7 deletions gcloud/datastore/batch.py
@@ -21,16 +21,13 @@
https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations
"""

from gcloud._helpers import _LocalStack
from gcloud.datastore import _implicit_environ
from gcloud.datastore import helpers
from gcloud.datastore.connection import _CONNECTIONS
from gcloud.datastore.key import _dataset_ids_equal
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb


_BATCHES = _LocalStack()


class Batch(object):
"""An abstraction representing a collected group of updates / deletes.
@@ -90,7 +87,7 @@ def __init__(self, dataset_id=None, connection=None):
@staticmethod
def current():
"""Return the topmost batch / transaction, or None."""
return _BATCHES.top
return _CONNECTIONS.top

@property
def dataset_id(self):
@@ -229,7 +226,7 @@ def rollback(self):
pass

def __enter__(self):
_BATCHES.push(self)
_CONNECTIONS.push(self)
self.begin()
return self

@@ -240,7 +237,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
else:
self.rollback()
finally:
_BATCHES.pop()
_CONNECTIONS.pop()


def _assign_entity_to_mutation(mutation_pb, entity, auto_id_entities):
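
Illustrative usage, modeled on test_as_context_mgr_nested further down in this diff: because __enter__ pushes the batch onto the shared _CONNECTIONS stack and __exit__ pops it, Batch.current() always reports the innermost active batch. The connection value here is assumed to be an already-configured datastore connection; it is not defined in this snippet.

from gcloud.datastore.batch import Batch

with Batch(dataset_id='DATASET', connection=connection) as outer:
    assert Batch.current() is outer
    with Batch(dataset_id='DATASET', connection=connection) as inner:
        assert Batch.current() is inner   # nested batch is now topmost
    assert Batch.current() is outer       # inner batch popped on __exit__
assert Batch.current() is None            # stack is empty again
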
4 changes: 4 additions & 0 deletions gcloud/datastore/connection.py
@@ -17,10 +17,14 @@
import os

from gcloud import connection
from gcloud._helpers import _LocalStack
from gcloud.exceptions import make_exception
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb


_CONNECTIONS = _LocalStack()


SCOPE = ('https://www.googleapis.com/auth/datastore',
'https://www.googleapis.com/auth/userinfo.email')
"""The scopes required for authenticating as a Cloud Datastore consumer."""
16 changes: 8 additions & 8 deletions gcloud/datastore/test_api.py
@@ -925,13 +925,13 @@ def __init__(self, dataset_id, connection):
self._batch = Batch(dataset_id, connection)

def __enter__(self):
from gcloud.datastore.batch import _BATCHES
_BATCHES.push(self._batch)
from gcloud.datastore.connection import _CONNECTIONS
_CONNECTIONS.push(self._batch)
return self._batch

def __exit__(self, *args):
from gcloud.datastore.batch import _BATCHES
_BATCHES.pop()
from gcloud.datastore.connection import _CONNECTIONS
_CONNECTIONS.pop()


class _NoCommitTransaction(object):
@@ -942,13 +942,13 @@ def __init__(self, dataset_id, connection, transaction_id='TRANSACTION'):
xact._id = transaction_id

def __enter__(self):
from gcloud.datastore.batch import _BATCHES
_BATCHES.push(self._transaction)
from gcloud.datastore.connection import _CONNECTIONS
_CONNECTIONS.push(self._transaction)
return self._transaction

def __exit__(self, *args):
from gcloud.datastore.batch import _BATCHES
_BATCHES.pop()
from gcloud.datastore.connection import _CONNECTIONS
_CONNECTIONS.pop()


class _HttpMultiple(object):
28 changes: 14 additions & 14 deletions gcloud/datastore/test_batch.py
@@ -279,21 +279,21 @@ def test_commit_w_auto_id_entities(self):
self.assertEqual(key._id, _NEW_ID)

def test_as_context_mgr_wo_error(self):
from gcloud.datastore.batch import _BATCHES
from gcloud.datastore.connection import _CONNECTIONS
_DATASET = 'DATASET'
_PROPERTIES = {'foo': 'bar'}
connection = _Connection()
entity = _Entity(_PROPERTIES)
key = entity.key = _Key(_DATASET)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

with self._makeOne(dataset_id=_DATASET,
connection=connection) as batch:
self.assertEqual(list(_BATCHES), [batch])
self.assertEqual(list(_CONNECTIONS), [batch])
batch.put(entity)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

insert_auto_ids = list(batch.mutation.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
@@ -305,7 +305,7 @@ def test_as_context_mgr_wo_error(self):
self.assertEqual(connection._committed, [(_DATASET, batch.mutation)])

def test_as_context_mgr_nested(self):
from gcloud.datastore.batch import _BATCHES
from gcloud.datastore.connection import _CONNECTIONS
_DATASET = 'DATASET'
_PROPERTIES = {'foo': 'bar'}
connection = _Connection()
@@ -314,20 +314,20 @@ def test_as_context_mgr_nested(self):
entity2 = _Entity(_PROPERTIES)
key2 = entity2.key = _Key(_DATASET)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

with self._makeOne(dataset_id=_DATASET,
connection=connection) as batch1:
self.assertEqual(list(_BATCHES), [batch1])
self.assertEqual(list(_CONNECTIONS), [batch1])
batch1.put(entity1)
with self._makeOne(dataset_id=_DATASET,
connection=connection) as batch2:
self.assertEqual(list(_BATCHES), [batch2, batch1])
self.assertEqual(list(_CONNECTIONS), [batch2, batch1])
batch2.put(entity2)

self.assertEqual(list(_BATCHES), [batch1])
self.assertEqual(list(_CONNECTIONS), [batch1])

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

insert_auto_ids = list(batch1.mutation.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
@@ -350,25 +350,25 @@ def test_as_context_mgr_nested(self):
(_DATASET, batch1.mutation)])

def test_as_context_mgr_w_error(self):
from gcloud.datastore.batch import _BATCHES
from gcloud.datastore.connection import _CONNECTIONS
_DATASET = 'DATASET'
_PROPERTIES = {'foo': 'bar'}
connection = _Connection()
entity = _Entity(_PROPERTIES)
key = entity.key = _Key(_DATASET)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

try:
with self._makeOne(dataset_id=_DATASET,
connection=connection) as batch:
self.assertEqual(list(_BATCHES), [batch])
self.assertEqual(list(_CONNECTIONS), [batch])
batch.put(entity)
raise ValueError("testing")
except ValueError:
pass

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

insert_auto_ids = list(batch.mutation.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
5 changes: 2 additions & 3 deletions gcloud/storage/_helpers.py
@@ -21,7 +21,7 @@
import base64

from gcloud.storage._implicit_environ import get_default_connection
from gcloud.storage.batch import Batch
from gcloud.storage.connection import _CONNECTIONS


class _PropertyMixin(object):
@@ -124,9 +124,8 @@ def _require_connection(connection=None):
:raises: :class:`EnvironmentError` if ``connection`` is ``None``, and
cannot be inferred from the environment.
"""
# NOTE: We use current Batch directly since it inherits from Connection.
if connection is None:
connection = Batch.current()
connection = _CONNECTIONS.top

if connection is None:
connection = get_default_connection()
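
Read together with the hunk above, the resolution order in _require_connection after this change is: an explicit argument first, then the topmost batch on _CONNECTIONS (a storage Batch inherits from Connection, so it can stand in for one), then the implicit default connection. A condensed sketch, not verbatim from the module:

from gcloud.storage._implicit_environ import get_default_connection
from gcloud.storage.connection import _CONNECTIONS

def _require_connection(connection=None):
    if connection is None:
        connection = _CONNECTIONS.top            # active Batch, if any
    if connection is None:
        connection = get_default_connection()    # implicit environment default
    if connection is None:
        raise EnvironmentError('Connection could not be inferred '
                               'from the environment.')
    return connection
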
11 changes: 4 additions & 7 deletions gcloud/storage/batch.py
@@ -26,13 +26,10 @@

import six

from gcloud._helpers import _LocalStack
from gcloud.exceptions import make_exception
from gcloud.storage import _implicit_environ
from gcloud.storage.connection import Connection


_BATCHES = _LocalStack()
from gcloud.storage.connection import _CONNECTIONS


class MIMEApplicationHTTP(MIMEApplication):
@@ -245,18 +242,18 @@ def finish(self):
@staticmethod
def current():
"""Return the topmost batch, or None."""
return _BATCHES.top
return _CONNECTIONS.top

def __enter__(self):
_BATCHES.push(self)
_CONNECTIONS.push(self)
return self

def __exit__(self, exc_type, exc_val, exc_tb):
try:
if exc_type is None:
self.finish()
finally:
_BATCHES.pop()
_CONNECTIONS.pop()


def _generate_faux_mime_message(parser, response, content):
4 changes: 4 additions & 0 deletions gcloud/storage/connection.py
@@ -15,6 +15,10 @@
"""Create / interact with gcloud storage connections."""

from gcloud import connection as base_connection
from gcloud._helpers import _LocalStack


_CONNECTIONS = _LocalStack()


SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
8 changes: 4 additions & 4 deletions gcloud/storage/test__helpers.py
@@ -294,10 +294,10 @@ def __init__(self, connection):
self._connection = connection

def __enter__(self):
from gcloud.storage.batch import _BATCHES
_BATCHES.push(self._connection)
from gcloud.storage.connection import _CONNECTIONS
_CONNECTIONS.push(self._connection)
return self._connection

def __exit__(self, *args):
from gcloud.storage.batch import _BATCHES
_BATCHES.pop()
from gcloud.storage.connection import _CONNECTIONS
_CONNECTIONS.pop()
30 changes: 22 additions & 8 deletions gcloud/storage/test_batch.py
@@ -222,6 +222,20 @@ def test_finish_empty(self):
self.assertRaises(ValueError, batch.finish)
self.assertTrue(connection.http is http)

def test_current(self):
from gcloud.storage.connection import _CONNECTIONS
klass = self._getTargetClass()
http = _HTTP() # no requests expected
connection = _Connection(http=http)
self.assertTrue(klass.current() is None)
batch = self._makeOne(connection)
_CONNECTIONS.push(batch)
try:
self.assertTrue(klass.current() is batch)
finally:
_CONNECTIONS.pop()
self.assertTrue(klass.current() is None)

def _check_subrequest_no_payload(self, chunk, method, url):
lines = chunk.splitlines()
# blank + 2 headers + blank + request + blank + blank
@@ -370,27 +384,27 @@ def test_finish_nonempty_non_multipart_response(self):
self.assertRaises(ValueError, batch.finish)

def test_as_context_mgr_wo_error(self):
from gcloud.storage.batch import _BATCHES
from gcloud.storage.connection import _CONNECTIONS
URL = 'http://example.com/api'
expected = _Response()
expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="'
http = _HTTP((expected, _THREE_PART_MIME_RESPONSE))
connection = _Connection(http=http)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

target1 = _MockObject()
target2 = _MockObject()
target3 = _MockObject()
with self._makeOne(connection) as batch:
self.assertEqual(list(_BATCHES), [batch])
self.assertEqual(list(_CONNECTIONS), [batch])
batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
target_object=target1)
batch._make_request('PATCH', URL, {'bar': 3},
target_object=target2)
batch._make_request('DELETE', URL, target_object=target3)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])
self.assertEqual(len(batch._requests), 3)
self.assertEqual(batch._requests[0][0], 'POST')
self.assertEqual(batch._requests[1][0], 'PATCH')
@@ -404,19 +418,19 @@ def test_as_context_mgr_wo_error(self):

def test_as_context_mgr_w_error(self):
from gcloud.storage.batch import _FutureDict
from gcloud.storage.batch import _BATCHES
from gcloud.storage.connection import _CONNECTIONS
URL = 'http://example.com/api'
http = _HTTP()
connection = _Connection(http=http)

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])

target1 = _MockObject()
target2 = _MockObject()
target3 = _MockObject()
try:
with self._makeOne(connection) as batch:
self.assertEqual(list(_BATCHES), [batch])
self.assertEqual(list(_CONNECTIONS), [batch])
batch._make_request('POST', URL, {'foo': 1, 'bar': 2},
target_object=target1)
batch._make_request('PATCH', URL, {'bar': 3},
@@ -426,7 +440,7 @@ def test_as_context_mgr_w_error(self):
except ValueError:
pass

self.assertEqual(list(_BATCHES), [])
self.assertEqual(list(_CONNECTIONS), [])
self.assertEqual(len(http._requests), 0)
self.assertEqual(len(batch._requests), 3)
self.assertEqual(batch._target_objects, [target1, target2, target3])
