diff --git a/gcloud/datastore/api.py b/gcloud/datastore/api.py
index 329cb1e5f10e..dca79c12aaeb 100644
--- a/gcloud/datastore/api.py
+++ b/gcloud/datastore/api.py
@@ -24,22 +24,38 @@ from gcloud.datastore import helpers
 
 
-def _require_dataset_id(dataset_id=None):
+def _require_dataset_id(dataset_id=None, first_key=None):
     """Infer a dataset ID from the environment, if not passed explicitly.
 
+    Order of precedence:
+
+    - Passed `dataset_id` (if not None).
+    - `dataset_id` of the current batch / transaction (if one exists).
+    - `dataset_id` of the first key (if passed).
+    - `dataset_id` inferred from the environment (if
+      `set_default_dataset_id` has been called).
+
     :type dataset_id: string
     :param dataset_id: Optional.
 
+    :type first_key: :class:`gcloud.datastore.key.Key` or None
+    :param first_key: Optional. The first key being manipulated.
+
     :rtype: string
     :returns: A dataset ID based on the current environment.
     :raises: :class:`EnvironmentError` if ``dataset_id`` is ``None``,
             and cannot be inferred from the environment.
     """
-    if dataset_id is None:
-        if _implicit_environ.DATASET_ID is None:
-            raise EnvironmentError('Dataset ID could not be inferred.')
-        dataset_id = _implicit_environ.DATASET_ID
-    return dataset_id
+    if dataset_id is not None:
+        return dataset_id
+
+    top = Batch.current()
+    if top is not None:
+        return top.dataset_id
+
+    if first_key is not None:
+        return first_key.dataset_id
+
+    if _implicit_environ.DATASET_ID is None:
+        raise EnvironmentError('Dataset ID could not be inferred.')
+
+    return _implicit_environ.DATASET_ID
 
 
 def _require_connection(connection=None):
@@ -54,38 +70,17 @@ def _require_connection(connection=None):
             cannot be inferred from the environment.
     """
     if connection is None:
-        if _implicit_environ.CONNECTION is None:
-            raise EnvironmentError('Connection could not be inferred.')
-        connection = _implicit_environ.CONNECTION
+        top = Batch.current()
+        if top is not None:
+            connection = top.connection
+        else:
+            if _implicit_environ.CONNECTION is None:
+                raise EnvironmentError('Connection could not be inferred.')
+            connection = _implicit_environ.CONNECTION
     return connection
 
 
-def _get_dataset_id_from_keys(keys):
-    """Determines dataset ID from a list of keys.
-
-    :type keys: list of :class:`gcloud.datastore.key.Key`
-    :param keys: The keys from the same dataset.
-
-    :rtype: string
-    :returns: The dataset ID of the keys.
-    :raises: :class:`ValueError` if the key dataset IDs don't agree.
-    """
-    if any(key is None for key in keys):
-        raise ValueError('None not allowed')
-
-    dataset_id = keys[0].dataset_id
-    # Rather than creating a list or set of all dataset IDs, we iterate
-    # and check. We could allow the backend to check this for us if IDs
-    # with no prefix worked (GoogleCloudPlatform/google-cloud-datastore#59)
-    # or if we made sure that a prefix s~ or e~ was on each key.
-    for key in keys[1:]:
-        if key.dataset_id != dataset_id:
-            raise ValueError('All keys in get must be from the same dataset.')
-
-    return dataset_id
-
-
-def get(keys, missing=None, deferred=None, connection=None):
+def get(keys, missing=None, deferred=None, connection=None, dataset_id=None):
     """Retrieves entities, along with their attributes.
 
     :type keys: list of :class:`gcloud.datastore.key.Key`
@@ -103,22 +98,35 @@ def get(keys, missing=None, deferred=None, connection=None):
 
     :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: Optional. The connection used to connect to datastore.
+                       If not passed, inferred from the environment.
+
+    :type dataset_id: string
+    :param dataset_id: Optional. The dataset ID used to connect to datastore.
+                       If not passed, inferred from the environment.
 
     :rtype: list of :class:`gcloud.datastore.entity.Entity`
     :returns: The requested entities.
+    :raises: EnvironmentError if ``connection`` or ``dataset_id`` not passed,
+             and cannot be inferred from the environment. ValueError if
+             one or more of ``keys`` has a dataset ID which does not match
+             the passed / inferred dataset ID.
     """
     if not keys:
        return []
 
     connection = _require_connection(connection)
-    dataset_id = _get_dataset_id_from_keys(keys)
+    dataset_id = _require_dataset_id(dataset_id, keys[0])
+
+    if list(set([key.dataset_id for key in keys])) != [dataset_id]:
+        raise ValueError('Keys do not match dataset ID')
 
     transaction = Transaction.current()
 
     entity_pbs = connection.lookup(
         dataset_id=dataset_id,
         key_pbs=[k.to_protobuf() for k in keys],
-        missing=missing, deferred=deferred,
+        missing=missing,
+        deferred=deferred,
         transaction_id=transaction and transaction.id,
     )
 
@@ -139,7 +147,7 @@
     return entities
 
 
-def put(entities, connection=None):
+def put(entities, connection=None, dataset_id=None):
     """Save the entities in the Cloud Datastore.
 
     :type entities: list of :class:`gcloud.datastore.entity.Entity`
@@ -147,17 +155,26 @@
 
     :type connection: :class:`gcloud.datastore.connection.Connection`
     :param connection: Optional connection used to connect to datastore.
+                       If not passed, inferred from the environment.
+
+    :type dataset_id: string
+    :param dataset_id: Optional. The dataset ID used to connect to datastore.
+                       If not passed, inferred from the environment.
+
+    :raises: EnvironmentError if ``connection`` or ``dataset_id`` not passed,
+             and cannot be inferred from the environment. ValueError if
+             one or more entities has a key with a dataset ID not matching
+             the passed / inferred dataset ID.
     """
     if not entities:
         return
 
-    connection = connection or _implicit_environ.CONNECTION
+    connection = _require_connection(connection)
+    dataset_id = _require_dataset_id(dataset_id, entities[0].key)
 
     current = Batch.current()
     in_batch = current is not None
     if not in_batch:
-        keys = [entity.key for entity in entities]
-        dataset_id = _get_dataset_id_from_keys(keys)
         current = Batch(dataset_id=dataset_id, connection=connection)
     for entity in entities:
         current.put(entity)
@@ -165,7 +182,7 @@
         current.commit()
 
 
-def delete(keys, connection=None):
+def delete(keys, connection=None, dataset_id=None):
     """Delete the keys in the Cloud Datastore.
 
     :type keys: list of :class:`gcloud.datastore.key.Key`
@@ -173,17 +190,27 @@
 
     :type connection: :class:`gcloud.datastore.connection.Connection`
     :param connection: Optional connection used to connect to datastore.
+                       If not passed, inferred from the environment.
+
+    :type dataset_id: string
+    :param dataset_id: Optional. The dataset ID used to connect to datastore.
+                       If not passed, inferred from the environment.
+
+    :raises: EnvironmentError if ``connection`` or ``dataset_id`` not passed,
+             and cannot be inferred from the environment. ValueError if
+             one or more keys has a dataset ID not matching the passed /
+             inferred dataset ID.
""" if not keys: return - connection = connection or _implicit_environ.CONNECTION + connection = _require_connection(connection) + dataset_id = _require_dataset_id(dataset_id, keys[0]) # We allow partial keys to attempt a delete, the backend will fail. current = Batch.current() in_batch = current is not None if not in_batch: - dataset_id = _get_dataset_id_from_keys(keys) current = Batch(dataset_id=dataset_id, connection=connection) for key in keys: current.delete(key) diff --git a/gcloud/datastore/batch.py b/gcloud/datastore/batch.py index 7de7814841e8..bc9c370fdeea 100644 --- a/gcloud/datastore/batch.py +++ b/gcloud/datastore/batch.py @@ -210,11 +210,16 @@ def put(self, entity): :type entity: :class:`gcloud.datastore.entity.Entity` :param entity: the entity to be saved. - :raises: ValueError if entity has no key assigned. + :raises: ValueError if entity has no key assigned, or if the key's + ``dataset_id`` does not match ours. """ if entity.key is None: raise ValueError("Entity must have a key") + if not helpers._dataset_ids_equal(self._dataset_id, + entity.key.dataset_id): + raise ValueError("Key must be from same dataset as batch") + _assign_entity_to_mutation( self.mutation, entity, self._auto_id_entities) @@ -224,11 +229,16 @@ def delete(self, key): :type key: :class:`gcloud.datastore.key.Key` :param key: the key to be deleted. - :raises: ValueError if key is not complete. + :raises: ValueError if key is not complete, or if the key's + ``dataset_id`` does not match ours. """ if key.is_partial: raise ValueError("Key must be complete") + if not helpers._dataset_ids_equal(self._dataset_id, + key.dataset_id): + raise ValueError("Key must be from same dataset as batch") + key_pb = key.to_protobuf() helpers._add_keys_to_request(self.mutation.delete, [key_pb]) diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index c0559ff9be5d..c6178b369ad3 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -319,3 +319,55 @@ def _add_keys_to_request(request_field_pb, key_pbs): for key_pb in key_pbs: key_pb = _prepare_key_for_request(key_pb) request_field_pb.add().CopyFrom(key_pb) + + +def _dataset_ids_equal(dataset_id1, dataset_id2): + """Compares two dataset IDs for fuzzy equality. + + Each may be prefixed or unprefixed (but not null, since dataset ID + is required on a key). The only allowed prefixes are 's~' and 'e~'. + + Two identical prefixed match + + >>> 's~foo' == 's~foo' + >>> 'e~bar' == 'e~bar' + + while non-identical prefixed don't + + >>> 's~foo' != 's~bar' + >>> 's~foo' != 'e~foo' + + As for non-prefixed, they can match other non-prefixed or + prefixed: + + >>> 'foo' == 'foo' + >>> 'foo' == 's~foo' + >>> 'foo' == 'e~foo' + >>> 'foo' != 'bar' + >>> 'foo' != 's~bar' + + (Ties are resolved since 'foo' can only be an alias for one of + s~foo or e~foo in the backend.) + + :type dataset_id1: string + :param dataset_id1: A dataset ID. + + :type dataset_id2: string + :param dataset_id2: A dataset ID. + + :rtype: boolean + :returns: Boolean indicating if the IDs are the same. + """ + if dataset_id1 == dataset_id2: + return True + + if dataset_id1.startswith('s~') or dataset_id1.startswith('e~'): + # If `dataset_id1` is prefixed and not matching, then the only way + # they can match is if `dataset_id2` is unprefixed. + return dataset_id1[2:] == dataset_id2 + elif dataset_id2.startswith('s~') or dataset_id2.startswith('e~'): + # Here we know `dataset_id1` is unprefixed and `dataset_id2` + # is prefixed. 
+        return dataset_id1 == dataset_id2[2:]
+
+    return False
diff --git a/gcloud/datastore/test_api.py b/gcloud/datastore/test_api.py
index 52b9ea0f9d85..abf238d796a0 100644
--- a/gcloud/datastore/test_api.py
+++ b/gcloud/datastore/test_api.py
@@ -19,39 +19,95 @@ class Test__require_dataset_id(unittest2.TestCase):
 
     _MARKER = object()
 
-    def _callFUT(self, passed=_MARKER):
+    def _callFUT(self, passed=_MARKER, first_key=None):
         from gcloud.datastore.api import _require_dataset_id
         if passed is self._MARKER:
-            return _require_dataset_id()
-        return _require_dataset_id(passed)
+            return _require_dataset_id(first_key=first_key)
+        return _require_dataset_id(dataset_id=passed, first_key=first_key)
 
     def _monkey(self, dataset_id):
         from gcloud.datastore import _implicit_environ
         from gcloud._testing import _Monkey
         return _Monkey(_implicit_environ, DATASET_ID=dataset_id)
 
-    def test_implicit_unset(self):
+    def test_implicit_unset_wo_keys(self):
         with self._monkey(None):
             with self.assertRaises(EnvironmentError):
                 self._callFUT()
 
-    def test_implicit_unset_passed_explicitly(self):
+    def test_implicit_unset_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        ID = 'DATASET'
+        with self._monkey(None):
+            self.assertEqual(self._callFUT(first_key=_Key(ID)), ID)
+
+    def test_implicit_unset_w_existing_batch_wo_keys(self):
+        ID = 'DATASET'
+        with self._monkey(None):
+            with _NoCommitBatch(dataset_id=ID, connection=object()):
+                self.assertEqual(self._callFUT(), ID)
+
+    def test_implicit_unset_w_existing_batch_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        ID = 'DATASET'
+        OTHER = 'OTHER'
+        with self._monkey(None):
+            with _NoCommitBatch(dataset_id=ID, connection=object()):
+                self.assertEqual(self._callFUT(first_key=_Key(OTHER)), ID)
+
+    def test_implicit_unset_w_existing_transaction_wo_keys(self):
+        ID = 'DATASET'
+        with self._monkey(None):
+            with _NoCommitTransaction(dataset_id=ID, connection=object()):
+                self.assertEqual(self._callFUT(), ID)
+
+    def test_implicit_unset_w_existing_transaction_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        ID = 'DATASET'
+        OTHER = 'OTHER'
+        with self._monkey(None):
+            with _NoCommitTransaction(dataset_id=ID, connection=object()):
+                self.assertEqual(self._callFUT(first_key=_Key(OTHER)), ID)
+
+    def test_implicit_unset_passed_explicitly_wo_keys(self):
         ID = 'DATASET'
         with self._monkey(None):
             self.assertEqual(self._callFUT(ID), ID)
 
-    def test_id_implicit_set(self):
+    def test_implicit_unset_passed_explicitly_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        ID = 'DATASET'
+        OTHER = 'OTHER'
+        with self._monkey(None):
+            self.assertEqual(self._callFUT(ID, first_key=_Key(OTHER)), ID)
+
+    def test_id_implicit_set_wo_keys(self):
         IMPLICIT_ID = 'IMPLICIT'
         with self._monkey(IMPLICIT_ID):
             stored_id = self._callFUT()
             self.assertTrue(stored_id is IMPLICIT_ID)
 
-    def test_id_implicit_set_passed_explicitly(self):
+    def test_id_implicit_set_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        IMPLICIT_ID = 'IMPLICIT'
+        OTHER = 'OTHER'
+        with self._monkey(IMPLICIT_ID):
+            self.assertEqual(self._callFUT(first_key=_Key(OTHER)), OTHER)
+
+    def test_id_implicit_set_passed_explicitly_wo_keys(self):
         ID = 'DATASET'
         IMPLICIT_ID = 'IMPLICIT'
         with self._monkey(IMPLICIT_ID):
             self.assertEqual(self._callFUT(ID), ID)
 
+    def test_id_implicit_set_passed_explicitly_w_keys(self):
+        from gcloud.datastore.test_batch import _Key
+        ID = 'DATASET'
+        IMPLICIT_ID = 'IMPLICIT'
+        OTHER = 'OTHER'
+        with self._monkey(IMPLICIT_ID):
+            self.assertEqual(self._callFUT(ID,
+                                           first_key=_Key(OTHER)), ID)
+
 
 class Test__require_connection(unittest2.TestCase):
 
@@ -73,6 +129,20 @@ def test_implicit_unset(self):
             with self.assertRaises(EnvironmentError):
                 self._callFUT()
 
+    def test_implicit_unset_w_existing_batch(self):
+        ID = 'DATASET'
+        CONNECTION = object()
+        with self._monkey(None):
+            with _NoCommitBatch(dataset_id=ID, connection=CONNECTION):
+                self.assertEqual(self._callFUT(), CONNECTION)
+
+    def test_implicit_unset_w_existing_transaction(self):
+        ID = 'DATASET'
+        CONNECTION = object()
+        with self._monkey(None):
+            with _NoCommitTransaction(dataset_id=ID, connection=CONNECTION):
+                self.assertEqual(self._callFUT(), CONNECTION)
+
     def test_implicit_unset_passed_explicitly(self):
         CONNECTION = object()
         with self._monkey(None):
@@ -90,43 +160,37 @@ def test_implicit_set_passed_explicitly(self):
             self.assertTrue(self._callFUT(CONNECTION) is CONNECTION)
 
 
-class Test__get_dataset_id_from_keys(unittest2.TestCase):
-
-    def _callFUT(self, keys):
-        from gcloud.datastore.api import _get_dataset_id_from_keys
-        return _get_dataset_id_from_keys(keys)
-
-    def _make_key(self, dataset_id):
-
-        class _Key(object):
-            def __init__(self, dataset_id):
-                self.dataset_id = dataset_id
-
-        return _Key(dataset_id)
-
-    def test_empty(self):
-        self.assertRaises(IndexError, self._callFUT, [])
+class Test_get_function(unittest2.TestCase):
 
-    def test_w_None(self):
-        self.assertRaises(ValueError, self._callFUT, [None])
+    def _callFUT(self, keys, missing=None, deferred=None,
+                 connection=None, dataset_id=None):
+        from gcloud.datastore.api import get
+        return get(keys, missing=missing, deferred=deferred,
+                   connection=connection, dataset_id=dataset_id)
 
-    def test_w_mismatch(self):
-        key1 = self._make_key('foo')
-        key2 = self._make_key('bar')
-        self.assertRaises(ValueError, self._callFUT, [key1, key2])
+    def _make_entity_pb(self, dataset_id, kind, integer_id,
+                        name=None, str_val=None):
+        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
 
-    def test_w_match(self):
-        key1 = self._make_key('foo')
-        key2 = self._make_key('foo')
-        self.assertEqual(self._callFUT([key1, key2]), 'foo')
+        entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = dataset_id
+        path_element = entity_pb.key.path_element.add()
+        path_element.kind = kind
+        path_element.id = integer_id
+        if name is not None and str_val is not None:
+            prop = entity_pb.property.add()
+            prop.name = name
+            prop.value.string_value = str_val
 
+        return entity_pb
 
-class Test_get_function(unittest2.TestCase):
+    def test_wo_connection(self):
+        from gcloud.datastore.key import Key
 
-    def _callFUT(self, keys, missing=None, deferred=None, connection=None):
-        from gcloud.datastore.api import get
-        return get(keys, missing=missing, deferred=deferred,
-                   connection=connection)
+        DATASET_ID = 'DATASET'
+        key = Key('Kind', 1234, dataset_id=DATASET_ID)
+        self.assertRaises(EnvironmentError,
+                          self._callFUT, [key], dataset_id=DATASET_ID)
 
     def test_no_keys(self):
         results = self._callFUT([])
@@ -136,11 +200,28 @@ def test_miss(self):
         from gcloud.datastore.key import Key
         from gcloud.datastore.test_connection import _Connection
 
+        DATASET_ID = 'DATASET'
         connection = _Connection()
         key = Key('Kind', 1234, dataset_id=DATASET_ID)
+        results = self._callFUT([key], connection=connection,
+                                dataset_id=DATASET_ID)
+        self.assertEqual(results, [])
+
+    def test_miss_wo_dataset_id(self):
+        from gcloud.datastore.key import Key
+        from gcloud.datastore.test_connection import _Connection
+
+        DATASET_ID = 'DATASET'
+        connection = _Connection()
+        key = Key('Kind', 1234, dataset_id=DATASET_ID)
+        results = self._callFUT([key], connection=connection)
+        self.assertEqual(results, [])
+        expected = {
+            'dataset_id': DATASET_ID,
+            'key_pbs': [key.to_protobuf()],
+            'transaction_id': None,
+        }
+        self.assertEqual(connection._called_with, expected)
 
     def test_miss_w_missing(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
@@ -164,7 +245,8 @@ def test_miss_w_missing(self):
         key = Key(KIND, ID, dataset_id=DATASET_ID)
 
         missing = []
-        entities = self._callFUT([key], connection=connection, missing=missing)
+        entities = self._callFUT([key], connection=connection,
+                                 missing=missing, dataset_id=DATASET_ID)
         self.assertEqual(entities, [])
         self.assertEqual([missed.key.to_protobuf() for missed in missing],
                          [key.to_protobuf()])
@@ -182,27 +264,11 @@ def test_miss_w_deferred(self):
         key = Key(KIND, ID, dataset_id=DATASET_ID)
 
         deferred = []
         entities = self._callFUT([key], connection=connection,
-                                 deferred=deferred)
+                                 deferred=deferred, dataset_id=DATASET_ID)
         self.assertEqual(entities, [])
         self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                          [key.to_protobuf()])
 
-    def _make_entity_pb(self, dataset_id, kind, integer_id,
-                        name=None, str_val=None):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
-
-        entity_pb = datastore_pb.Entity()
-        entity_pb.key.partition_id.dataset_id = dataset_id
-        path_element = entity_pb.key.path_element.add()
-        path_element.kind = kind
-        path_element.id = integer_id
-        if name is not None and str_val is not None:
-            prop = entity_pb.property.add()
-            prop.name = name
-            prop.value.string_value = str_val
-
-        return entity_pb
-
     def test_hit(self):
         from gcloud.datastore.key import Key
         from gcloud.datastore.test_connection import _Connection
@@ -220,7 +286,8 @@ def test_hit(self):
         connection = _Connection(entity_pb)
         key = Key(KIND, ID, dataset_id=DATASET_ID)
 
-        result, = self._callFUT([key], connection=connection)
+        result, = self._callFUT([key], connection=connection,
+                                dataset_id=DATASET_ID)
         new_key = result.key
 
         # Check the returned value is as expected.
@@ -249,7 +316,7 @@ def test_hit_multiple_keys_same_dataset(self):
         key1 = Key(KIND, ID1, dataset_id=DATASET_ID)
         key2 = Key(KIND, ID2, dataset_id=DATASET_ID)
         retrieved1, retrieved2 = self._callFUT(
-            [key1, key2], connection=connection)
+            [key1, key2], connection=connection, dataset_id=DATASET_ID)
 
         # Check values match.
         self.assertEqual(retrieved1.key.path, key1.path)
@@ -269,7 +336,8 @@ def test_hit_multiple_keys_different_dataset(self):
         key1 = Key('KIND', 1234, dataset_id=DATASET_ID1)
         key2 = Key('KIND', 1234, dataset_id=DATASET_ID2)
         with self.assertRaises(ValueError):
-            self._callFUT([key1, key2], connection=object())
+            self._callFUT([key1, key2], connection=object(),
+                          dataset_id=DATASET_ID1)
 
     def test_implicit_wo_transaction(self):
         from gcloud.datastore import _implicit_environ
@@ -328,7 +396,8 @@ def test_w_transaction(self):
         key = Key(KIND, ID, dataset_id=DATASET_ID)
 
         with _NoCommitTransaction(DATASET_ID, CUSTOM_CONNECTION, TRANSACTION):
-            result, = self._callFUT([key], connection=CUSTOM_CONNECTION)
+            result, = self._callFUT([key], connection=CUSTOM_CONNECTION,
+                                    dataset_id=DATASET_ID)
 
         expected_called_with = {
             'dataset_id': DATASET_ID,
@@ -348,9 +417,56 @@ def test_w_transaction(self):
 
 class Test_put_function(unittest2.TestCase):
 
-    def _callFUT(self, entities, connection=None):
+    def _callFUT(self, entities, connection=None, dataset_id=None):
         from gcloud.datastore.api import put
-        return put(entities, connection=connection)
+        return put(entities, connection=connection, dataset_id=dataset_id)
+
+    def test_no_connection(self):
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_batch import _Entity
+        from gcloud.datastore.test_batch import _Key
+
+        # Build basic mocks needed to put.
+        _DATASET = 'DATASET'
+        entity = _Entity(foo=u'bar')
+        entity.key = _Key(_DATASET)
+
+        self.assertEqual(_implicit_environ.CONNECTION, None)
+        with self.assertRaises(EnvironmentError):
+            self._callFUT([entity], dataset_id=_DATASET)
+
+    def test_no_dataset_id(self):
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_batch import _Connection
+        from gcloud.datastore.test_batch import _Entity
+        from gcloud.datastore.test_batch import _Key
+
+        # Build basic mocks needed to put.
+        _DATASET = 'DATASET'
+        connection = _Connection()
+        entity = _Entity(foo=u'bar')
+        entity.key = _Key(_DATASET)
+
+        self.assertEqual(_implicit_environ.CONNECTION, None)
+        result = self._callFUT([entity], connection=connection)
+
+        self.assertEqual(result, None)
+        self.assertEqual(len(connection._committed), 1)
+        dataset_id, mutation = connection._committed[0]
+        self.assertEqual(dataset_id, _DATASET)
+        upserts = list(mutation.upsert)
+        self.assertEqual(len(upserts), 1)
+        self.assertEqual(upserts[0].key, entity.key.to_protobuf())
+        properties = list(upserts[0].property)
+        self.assertEqual(properties[0].name, 'foo')
+        self.assertEqual(properties[0].value.string_value, u'bar')
+
+    def test_no_entities(self):
+        from gcloud.datastore import _implicit_environ
+
+        self.assertEqual(_implicit_environ.CONNECTION, None)
+        result = self._callFUT([])
+        self.assertEqual(result, None)
 
     def test_no_batch_w_partial_key(self):
         from gcloud.datastore.test_batch import _Connection
@@ -364,7 +480,8 @@ def test_no_batch_w_partial_key(self):
         key = entity.key = _Key(_DATASET)
         key._id = None
 
-        result = self._callFUT([entity], connection=connection)
+        result = self._callFUT([entity], connection=connection,
+                               dataset_id=_DATASET)
         self.assertEqual(result, None)
         self.assertEqual(len(connection._committed), 1)
         dataset_id, mutation = connection._committed[0]
@@ -430,33 +547,51 @@ def test_implicit_connection(self):
         self.assertEqual(properties[0].value.string_value, u'bar')
         self.assertEqual(len(CURR_BATCH.mutation.delete), 0)
 
-    def test_no_entities(self):
+
+class Test_delete_function(unittest2.TestCase):
+
+    def _callFUT(self, keys, connection=None, dataset_id=None):
+        from gcloud.datastore.api import delete
+        return delete(keys, connection=connection, dataset_id=dataset_id)
+
+    def test_no_connection(self):
         from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_batch import _Key
+
+        # Build basic mocks needed to delete.
+        _DATASET = 'DATASET'
+        key = _Key(_DATASET)
 
         self.assertEqual(_implicit_environ.CONNECTION, None)
-        result = self._callFUT([])
-        self.assertEqual(result, None)
+        with self.assertRaises(EnvironmentError):
+            self._callFUT([key], dataset_id=_DATASET)
 
-    def test_no_connection(self):
+    def test_no_dataset_id(self):
         from gcloud.datastore import _implicit_environ
-        from gcloud.datastore.test_batch import _Entity
+        from gcloud.datastore.test_batch import _Connection
         from gcloud.datastore.test_batch import _Key
 
         # Build basic mocks needed to delete.
         _DATASET = 'DATASET'
-        entity = _Entity(foo=u'bar')
-        entity.key = _Key(_DATASET)
+        connection = _Connection()
+        key = _Key(_DATASET)
 
         self.assertEqual(_implicit_environ.CONNECTION, None)
-        with self.assertRaises(ValueError):
-            self._callFUT([entity])
+        result = self._callFUT([key], connection=connection)
 
-class Test_delete_function(unittest2.TestCase):
+        self.assertEqual(result, None)
+        self.assertEqual(len(connection._committed), 1)
+        dataset_id, mutation = connection._committed[0]
+        self.assertEqual(dataset_id, _DATASET)
+        self.assertEqual(list(mutation.delete), [key.to_protobuf()])
 
-    def _callFUT(self, keys, connection=None):
-        from gcloud.datastore.api import delete
-        return delete(keys, connection=connection)
+    def test_no_keys(self):
+        from gcloud.datastore import _implicit_environ
+
+        self.assertEqual(_implicit_environ.CONNECTION, None)
+        result = self._callFUT([])
+        self.assertEqual(result, None)
 
     def test_no_batch(self):
         from gcloud.datastore.test_batch import _Connection
@@ -467,14 +602,37 @@ def test_no_batch(self):
         connection = _Connection()
         key = _Key(_DATASET)
 
-        result = self._callFUT([key], connection=connection)
+        result = self._callFUT([key], connection=connection,
+                               dataset_id=_DATASET)
+        self.assertEqual(result, None)
+        self.assertEqual(len(connection._committed), 1)
+        dataset_id, mutation = connection._committed[0]
+        self.assertEqual(dataset_id, _DATASET)
+        self.assertEqual(list(mutation.delete), [key.to_protobuf()])
+
+    def test_wo_batch_w_key_different_than_default_dataset_id(self):
+        from gcloud._testing import _Monkey
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_batch import _Connection
+        from gcloud.datastore.test_batch import _Key
+
+        # Build basic mocks needed to delete.
+        _DEFAULT_DATASET = 'DEFAULT'
+        _DATASET = 'DATASET'
+        connection = _Connection()
+        key = _Key(_DATASET)
+
+        with _Monkey(_implicit_environ,
+                     CONNECTION=connection,
+                     DATASET_ID=_DEFAULT_DATASET):
+            result = self._callFUT([key])
         self.assertEqual(result, None)
         self.assertEqual(len(connection._committed), 1)
         dataset_id, mutation = connection._committed[0]
         self.assertEqual(dataset_id, _DATASET)
         self.assertEqual(list(mutation.delete), [key.to_protobuf()])
 
-    def test_existing_batch(self):
+    def test_w_existing_batch(self):
         from gcloud.datastore.test_batch import _Connection
         from gcloud.datastore.test_batch import _Key
 
@@ -485,7 +643,7 @@ def test_existing_batch(self):
 
         # Set up Batch on stack so we can check it is used.
         with _NoCommitBatch(_DATASET, connection) as CURR_BATCH:
-            result = self._callFUT([key], connection=connection)
+            result = self._callFUT([key])
 
         self.assertEqual(result, None)
         self.assertEqual(len(CURR_BATCH.mutation.insert_auto_id), 0)
@@ -495,9 +653,7 @@
         self.assertEqual(deletes[0], key._key)
         self.assertEqual(len(connection._committed), 0)
 
-    def test_implicit_connection(self):
-        from gcloud._testing import _Monkey
-        from gcloud.datastore import _implicit_environ
+    def test_w_existing_transaction(self):
         from gcloud.datastore.test_batch import _Connection
         from gcloud.datastore.test_batch import _Key
 
@@ -506,10 +662,9 @@
         connection = _Connection()
         key = _Key(_DATASET)
 
-        with _Monkey(_implicit_environ, CONNECTION=connection):
-            # Set up Batch on stack so we can check it is used.
-            with _NoCommitBatch(_DATASET, connection) as CURR_BATCH:
-                result = self._callFUT([key])
+        # Set up Transaction on stack so we can check it is used.
+        with _NoCommitTransaction(_DATASET, connection) as CURR_BATCH:
+            result = self._callFUT([key])
 
         self.assertEqual(result, None)
         self.assertEqual(len(CURR_BATCH.mutation.insert_auto_id), 0)
@@ -519,24 +674,31 @@
         self.assertEqual(deletes[0], key._key)
         self.assertEqual(len(connection._committed), 0)
 
-    def test_no_keys(self):
-        from gcloud.datastore import _implicit_environ
-
-        self.assertEqual(_implicit_environ.CONNECTION, None)
-        result = self._callFUT([])
-        self.assertEqual(result, None)
-
-    def test_no_connection(self):
+    def test_implicit_connection_and_dataset_id(self):
+        from gcloud._testing import _Monkey
         from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_batch import _Connection
         from gcloud.datastore.test_batch import _Key
 
         # Build basic mocks needed to delete.
         _DATASET = 'DATASET'
+        connection = _Connection()
         key = _Key(_DATASET)
 
-        self.assertEqual(_implicit_environ.CONNECTION, None)
-        with self.assertRaises(ValueError):
-            self._callFUT([key])
+        with _Monkey(_implicit_environ,
+                     CONNECTION=connection,
+                     DATASET_ID=_DATASET):
+            # Set up Batch on stack so we can check it is used.
+            with _NoCommitBatch(_DATASET, connection) as CURR_BATCH:
+                result = self._callFUT([key])
+
+        self.assertEqual(result, None)
+        self.assertEqual(len(CURR_BATCH.mutation.insert_auto_id), 0)
+        self.assertEqual(len(CURR_BATCH.mutation.upsert), 0)
+        deletes = list(CURR_BATCH.mutation.delete)
+        self.assertEqual(len(deletes), 1)
+        self.assertEqual(deletes[0], key._key)
+        self.assertEqual(len(connection._committed), 0)
 
 
 class Test_allocate_ids_function(unittest2.TestCase):
@@ -610,7 +772,7 @@ def __exit__(self, *args):
 
 class _NoCommitTransaction(object):
 
-    def __init__(self, dataset_id, connection, transaction_id):
+    def __init__(self, dataset_id, connection, transaction_id='TRANSACTION'):
         from gcloud.datastore.transaction import Transaction
         xact = self._transaction = Transaction(dataset_id, connection)
         xact._id = transaction_id
diff --git a/gcloud/datastore/test_batch.py b/gcloud/datastore/test_batch.py
index 6121bd83dc83..579fee92a612 100644
--- a/gcloud/datastore/test_batch.py
+++ b/gcloud/datastore/test_batch.py
@@ -116,7 +116,7 @@ def test_add_auto_id_entity_w_partial_key(self):
         connection = _Connection()
         batch = self._makeOne(dataset_id=_DATASET, connection=connection)
         entity = _Entity()
-        key = entity.key = _Key(_Entity)
+        key = entity.key = _Key(_DATASET)
         key._id = None
 
         batch.add_auto_id_entity(entity)
@@ -128,7 +128,7 @@ def test_add_auto_id_entity_w_completed_key(self):
         connection = _Connection()
         batch = self._makeOne(dataset_id=_DATASET, connection=connection)
         entity = _Entity()
-        entity.key = _Key(_Entity)
+        entity.key = _Key(_DATASET)
 
         self.assertRaises(ValueError, batch.add_auto_id_entity, entity)
 
@@ -139,6 +139,15 @@ def test_put_entity_wo_key(self):
 
         self.assertRaises(ValueError, batch.put, _Entity())
 
+    def test_put_entity_w_key_wrong_dataset_id(self):
+        _DATASET = 'DATASET'
+        connection = _Connection()
+        batch = self._makeOne(dataset_id=_DATASET, connection=connection)
+        entity = _Entity()
+        entity.key = _Key('OTHER')
+
+        self.assertRaises(ValueError, batch.put, entity)
+
     def test_put_entity_w_partial_key(self):
         _DATASET = 'DATASET'
         _PROPERTIES = {'foo': 'bar'}
@@ -194,6 +203,41 @@ def test_put_entity_w_completed_key(self):
         deletes = list(batch.mutation.delete)
         self.assertEqual(len(deletes), 0)
 
+    def test_put_entity_w_completed_key_prefixed_dataset_id(self):
+        _DATASET = 'DATASET'
+        _PROPERTIES = {
+            'foo': 'bar',
+            'baz': 'qux',
+            'spam': [1, 2, 3],
+            'frotz': [],  # will be ignored
+        }
+        connection = _Connection()
+        batch = self._makeOne(dataset_id=_DATASET, connection=connection)
+        entity = _Entity(_PROPERTIES)
+        entity.exclude_from_indexes = ('baz', 'spam')
+        key = entity.key = _Key('s~' + _DATASET)
+
+        batch.put(entity)
+
+        insert_auto_ids = list(batch.mutation.insert_auto_id)
+        self.assertEqual(len(insert_auto_ids), 0)
+        upserts = list(batch.mutation.upsert)
+        self.assertEqual(len(upserts), 1)
+
+        upsert = upserts[0]
+        self.assertEqual(upsert.key, key._key)
+        props = dict([(prop.name, prop.value) for prop in upsert.property])
+        self.assertTrue(props['foo'].indexed)
+        self.assertFalse(props['baz'].indexed)
+        self.assertTrue(props['spam'].indexed)
+        self.assertFalse(props['spam'].list_value[0].indexed)
+        self.assertFalse(props['spam'].list_value[1].indexed)
+        self.assertFalse(props['spam'].list_value[2].indexed)
+        self.assertFalse('frotz' in props)
+
+        deletes = list(batch.mutation.delete)
+        self.assertEqual(len(deletes), 0)
+
     def test_delete_w_partial_key(self):
         _DATASET = 'DATASET'
         connection = _Connection()
@@ -203,6 +247,14 @@ def test_delete_w_partial_key(self):
 
         self.assertRaises(ValueError, batch.delete, key)
 
+    def test_delete_w_key_wrong_dataset_id(self):
+        _DATASET = 'DATASET'
+        connection = _Connection()
+        batch = self._makeOne(dataset_id=_DATASET, connection=connection)
+        key = _Key('OTHER')
+
+        self.assertRaises(ValueError, batch.delete, key)
+
     def test_delete_w_completed_key(self):
         _DATASET = 'DATASET'
         connection = _Connection()
@@ -219,6 +271,22 @@ def test_delete_w_completed_key(self):
         self.assertEqual(len(deletes), 1)
         self.assertEqual(deletes[0], key._key)
 
+    def test_delete_w_completed_key_w_prefixed_dataset_id(self):
+        _DATASET = 'DATASET'
+        connection = _Connection()
+        batch = self._makeOne(dataset_id=_DATASET, connection=connection)
+        key = _Key('s~' + _DATASET)
+
+        batch.delete(key)
+
+        insert_auto_ids = list(batch.mutation.insert_auto_id)
+        self.assertEqual(len(insert_auto_ids), 0)
+        upserts = list(batch.mutation.upsert)
+        self.assertEqual(len(upserts), 0)
+        deletes = list(batch.mutation.delete)
+        self.assertEqual(len(deletes), 1)
+        self.assertEqual(deletes[0], key._key)
+
     def test_commit(self):
         _DATASET = 'DATASET'
         connection = _Connection()
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index 6602d5ed6a61..b087444ab97d 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -517,3 +517,27 @@ def test_prepare_dataset_id_unset(self):
         key = datastore_pb.Key()
         new_key = self._callFUT(key)
         self.assertTrue(new_key is key)
+
+
+class Test__dataset_ids_equal(unittest2.TestCase):
+
+    def _callFUT(self, dataset_id1, dataset_id2):
+        from gcloud.datastore.helpers import _dataset_ids_equal
+        return _dataset_ids_equal(dataset_id1, dataset_id2)
+
+    def test_identical_prefixed(self):
+        self.assertTrue(self._callFUT('s~foo', 's~foo'))
+        self.assertTrue(self._callFUT('e~bar', 'e~bar'))
+
+    def test_different_prefixed(self):
+        self.assertFalse(self._callFUT('s~foo', 's~bar'))
+        self.assertFalse(self._callFUT('s~foo', 'e~foo'))
+
+    def test_all_unprefixed(self):
+        self.assertTrue(self._callFUT('foo', 'foo'))
+        self.assertFalse(self._callFUT('foo', 'bar'))
+
+    def test_unprefixed_with_prefixed(self):
+        self.assertTrue(self._callFUT('foo', 's~foo'))
+        self.assertTrue(self._callFUT('foo', 'e~foo'))
+        self.assertFalse(self._callFUT('foo', 's~bar'))
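
For reviewers who want to poke at the matching rules without checking out the branch, here is a minimal standalone sketch of the fuzzy comparison (the function name is illustrative; the logic mirrors `_dataset_ids_equal` above and the asserted cases come from Test__dataset_ids_equal):

def dataset_ids_equal(id1, id2):
    """Fuzzy equality: an unprefixed ID matches its 's~'/'e~' prefixed forms."""
    if id1 == id2:
        return True
    if id1[:2] in ('s~', 'e~'):
        # A prefixed ID that didn't match exactly can only match the
        # unprefixed form of itself.
        return id1[2:] == id2
    if id2[:2] in ('s~', 'e~'):
        # id1 is unprefixed here, so compare it against id2 minus its prefix.
        return id1 == id2[2:]
    return False

# Mirrors the cases exercised in test_helpers.py above.
assert dataset_ids_equal('s~foo', 's~foo')
assert not dataset_ids_equal('s~foo', 'e~foo')
assert dataset_ids_equal('foo', 's~foo')
assert dataset_ids_equal('foo', 'e~foo')
assert not dataset_ids_equal('foo', 's~bar')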
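And a quick sketch of the new fail-fast validation in get(), mirroring test_hit_multiple_keys_different_dataset above (the kind and dataset names are placeholders; a dummy object stands in for the connection since the ValueError is raised before any RPC is attempted):

from gcloud.datastore.api import get
from gcloud.datastore.key import Key

# Keys from two different datasets in one get() now fail fast.
key1 = Key('Kind', 1234, dataset_id='DATASET1')
key2 = Key('Kind', 1234, dataset_id='DATASET2')
try:
    get([key1, key2], connection=object(), dataset_id='DATASET1')
except ValueError as exc:
    print(exc)  # Keys do not match dataset ID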