From 48d201551c48cd57a4c1f93b78bd46886050979d Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Wed, 31 Dec 2014 14:27:58 -0800
Subject: [PATCH] Make dataset_id required on Key.

Addresses seventh part of #451.

Also requires removing the dataset_id from generated protobufs
due to GoogleCloudPlatform/google-cloud-datastore#59. This also
occurs in __key__ filters and ancestor queries.
---
 gcloud/datastore/connection.py       | 12 ++----
 gcloud/datastore/entity.py           |  2 +-
 gcloud/datastore/helpers.py          | 42 +++++++++++++++++++
 gcloud/datastore/key.py              | 37 ++++++++++-------
 gcloud/datastore/query.py            | 12 +++++-
 gcloud/datastore/test_connection.py  | 46 +++++++++++++--------
 gcloud/datastore/test_dataset.py     |  9 ++++
 gcloud/datastore/test_entity.py      |  4 +-
 gcloud/datastore/test_helpers.py     | 34 ++++++++++++++-
 gcloud/datastore/test_key.py         | 25 ++++++++---
 gcloud/datastore/test_query.py       | 62 +++++++++++++++++++++++-----
 gcloud/datastore/test_transaction.py | 25 +++++------
 gcloud/datastore/transaction.py      |  8 +---
 regression/populate_datastore.py     |  2 +-
 14 files changed, 234 insertions(+), 86 deletions(-)

diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index ba57de9269e0..c71b23a33592 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs,
         if single_key:
             key_pbs = [key_pbs]
 
-        for key_pb in key_pbs:
-            lookup_request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(lookup_request.key, key_pbs)
 
         results, missing_found, deferred_found = self._lookup(
             lookup_request, dataset_id, deferred is not None)
@@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs):
         :returns: An equal number of keys, with IDs filled in by the backend.
         """
         request = datastore_pb.AllocateIdsRequest()
-        for key_pb in key_pbs:
-            request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(request.key, key_pbs)
         # Nothing to do with this response, so just execute the method.
         response = self._rpc(dataset_id, 'allocateIds', request,
                              datastore_pb.AllocateIdsResponse)
@@ -451,6 +449,7 @@ def save_entity(self, dataset_id, key_pb, properties,
                   either `None` or an integer that has been assigned.
         """
         mutation = self.mutation()
+        key_pb = helpers._prepare_key_for_request(key_pb)
 
         # If the Key is complete, we should upsert
         # instead of using insert_auto_id.
@@ -515,10 +514,7 @@ def delete_entities(self, dataset_id, key_pbs):
         :returns: True
         """
         mutation = self.mutation()
-
-        for key_pb in key_pbs:
-            delete = mutation.delete.add()
-            delete.CopyFrom(key_pb)
+        helpers._add_keys_to_request(mutation.delete, key_pbs)
 
         if not self.transaction():
             self.commit(dataset_id, mutation)
diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py
index 2e30821b9f65..382c22552b39 100644
--- a/gcloud/datastore/entity.py
+++ b/gcloud/datastore/entity.py
@@ -99,7 +99,7 @@ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
         # _implicit_environ._DatastoreBase to avoid split MRO.
         self._dataset = dataset or _implicit_environ.DATASET
         if kind:
-            self._key = Key(kind)
+            self._key = Key(kind, dataset_id=self.dataset().id())
         else:
             self._key = None
         self._exclude_from_indexes = set(exclude_from_indexes)
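
To illustrate the entity change above: constructing an Entity with a kind now
mints a key that already carries the dataset ID of the entity's dataset. A
minimal sketch, assuming a Dataset can be built directly from an ID the way
the tests below do ('my-dataset-id' is a placeholder):

    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.entity import Entity

    dataset = Dataset('my-dataset-id')  # placeholder dataset ID
    entity = Entity(dataset=dataset, kind='Person')

    # The key is created eagerly and inherits the dataset ID.
    assert entity.key().dataset_id == 'my-dataset-id'
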
diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py
index 2231f5b5030c..3b9023336a34 100644
--- a/gcloud/datastore/helpers.py
+++ b/gcloud/datastore/helpers.py
@@ -25,6 +25,7 @@
 import pytz
 import six
 
+from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 from gcloud.datastore.entity import Entity
 from gcloud.datastore.key import Key
 
@@ -259,3 +260,44 @@ def _set_protobuf_value(value_pb, val):
             _set_protobuf_value(i_pb, item)
     else:  # scalar, just assign
         setattr(value_pb, attr, val)
+
+
+def _prepare_key_for_request(key_pb):
+    """Strip the dataset ID from a key protobuf before a request.
+
+    :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pb: A key to be added to a request.
+
+    :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :returns: A key which will be added to a request. It will be the
+              original if nothing needs to be changed.
+    """
+    if key_pb.partition_id.HasField('dataset_id'):
+        # We remove the dataset_id from the protobuf. This is because
+        # the backend fails a request if the key contains an
+        # un-prefixed dataset ID. The backend fails because requests to
+        #     /datastore/.../datasets/foo/...
+        # and
+        #     /datastore/.../datasets/s~foo/...
+        # both go to the datastore given by 's~foo'. So if the key
+        # protobuf in the request body has dataset_id='foo', the
+        # backend will reject since 'foo' != 's~foo'.
+        new_key_pb = datastore_pb.Key()
+        new_key_pb.CopyFrom(key_pb)
+        new_key_pb.partition_id.ClearField('dataset_id')
+        key_pb = new_key_pb
+    return key_pb
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+    """Add protobuf keys to a request object.
+
+    :type request_field_pb: `RepeatedCompositeFieldContainer`
+    :param request_field_pb: A repeated proto field that contains keys.
+
+    :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pbs: The keys to add to a request.
+    """
+    for key_pb in key_pbs:
+        key_pb = _prepare_key_for_request(key_pb)
+        request_field_pb.add().CopyFrom(key_pb)
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index 6c95439665f3..2e8b26ddd10b 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -18,6 +18,7 @@
 from itertools import izip
 
 import six
 
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
@@ -56,24 +57,30 @@ def __init__(self, *path_args, **kwargs):
                           passed as a keyword argument.
 
         :type dataset_id: string
-        :param dataset_id: The dataset ID associated with the key. Can only be
-                           passed as a keyword argument.
-
-        # This note will be obsolete by the end of #451.
-
-        .. note::
-          The key's ``_dataset_id`` field must be None for keys created
-          by application code. The
-          :func:`gcloud.datastore.helpers.key_from_protobuf` factory
-          will be set the field to an appropriate value for keys
-          returned from the datastore backend. The application
-          **must** treat any value set by the back-end as opaque.
+        :param dataset_id: The dataset ID associated with the key. This is
+                           required. Can only be passed as a keyword argument.
         """
         self._path = self._parse_path(path_args)
         self._flat_path = path_args
         self._parent = None
         self._namespace = kwargs.get('namespace')
         self._dataset_id = kwargs.get('dataset_id')
+        self._validate_dataset_id()
+
+    def _validate_dataset_id(self):
+        """Ensures the dataset ID is set.
+
+        If unset, attempts to imply the ID from the environment.
+
+        :raises: `ValueError` if there is no `dataset_id` and none
+                 can be implied.
+        """
+        if self._dataset_id is None:
+            if _implicit_environ.DATASET is not None:
+                # This assumes DATASET.id() is not None.
+                self._dataset_id = _implicit_environ.DATASET.id()
+            else:
+                raise ValueError('A Key must have a dataset ID set.')
 
     @staticmethod
     def _parse_path(path_args):
@@ -160,9 +167,7 @@ def to_protobuf(self):
         :returns: The Protobuf representing the key.
         """
         key = datastore_pb.Key()
-
-        if self.dataset_id is not None:
-            key.partition_id.dataset_id = self.dataset_id
+        key.partition_id.dataset_id = self.dataset_id
 
         if self.namespace:
             key.partition_id.namespace = self.namespace
@@ -297,4 +302,4 @@ def parent(self):
         return self._parent
 
     def __repr__(self):
-        return '<Key%s>' % self.path
+        return '<Key%s, dataset=%s>' % (self.path, self.dataset_id)
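
The two new helpers are the heart of the workaround for
GoogleCloudPlatform/google-cloud-datastore#59. A short sketch of their
behavior, derived from the tests further down (the 'foo' ID is a stand-in
for an un-prefixed dataset ID):

    from gcloud.datastore import datastore_v1_pb2 as datastore_pb
    from gcloud.datastore import helpers

    key_pb = datastore_pb.Key()
    key_pb.partition_id.dataset_id = 'foo'  # un-prefixed, would be rejected

    trimmed = helpers._prepare_key_for_request(key_pb)
    # A copy comes back with the dataset ID stripped; the original
    # protobuf is left untouched.
    assert not trimmed.partition_id.HasField('dataset_id')
    assert key_pb.partition_id.dataset_id == 'foo'

    # The repeated-field variant trims each key as it copies it in.
    request = datastore_pb.LookupRequest()
    helpers._add_keys_to_request(request.key, [key_pb])
    assert not request.key[0].partition_id.HasField('dataset_id')
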
""" self._path = self._parse_path(path_args) self._flat_path = path_args self._parent = None self._namespace = kwargs.get('namespace') self._dataset_id = kwargs.get('dataset_id') + self._validate_dataset_id() + + def _validate_dataset_id(self): + """Ensures the dataset ID is set. + + If unset, attempts to imply the ID from the environment. + + :raises: `ValueError` if there is no `dataset_id` and none + can be implied. + """ + if self._dataset_id is None: + if _implicit_environ.DATASET is not None: + # This assumes DATASET.id() is not None. + self._dataset_id = _implicit_environ.DATASET.id() + else: + raise ValueError('A Key must have a dataset ID set.') @staticmethod def _parse_path(path_args): @@ -160,9 +167,7 @@ def to_protobuf(self): :returns: The Protobuf representing the key. """ key = datastore_pb.Key() - - if self.dataset_id is not None: - key.partition_id.dataset_id = self.dataset_id + key.partition_id.dataset_id = self.dataset_id if self.namespace: key.partition_id.namespace = self.namespace @@ -297,4 +302,4 @@ def parent(self): return self._parent def __repr__(self): - return '' % self.path + return '' % (self.path, self.dataset_id) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 5fc52f689dc2..569db3ce1d37 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -163,7 +163,14 @@ def filter(self, property_name, operator, value): property_filter.operator = pb_op_enum # Set the value to filter on based on the type. - helpers._set_protobuf_value(property_filter.value, value) + if property_name == '__key__': + if not isinstance(value, Key): + raise TypeError('__key__ query requires a Key instance.') + key_pb = value.to_protobuf() + property_filter.value.key_value.CopyFrom( + helpers._prepare_key_for_request(key_pb)) + else: + helpers._set_protobuf_value(property_filter.value, value) return clone def ancestor(self, ancestor): @@ -216,7 +223,8 @@ def ancestor(self, ancestor): ancestor_filter = composite_filter.filter.add().property_filter ancestor_filter.property.name = '__key__' ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR - ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf()) + ancestor_pb = helpers._prepare_key_for_request(ancestor.to_protobuf()) + ancestor_filter.value.key_value.CopyFrom(ancestor_pb) return clone diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index 888a95392629..2e305476b63a 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -235,7 +235,7 @@ def test_lookup_single_key_empty_response(self): request.ParseFromString(cw['body']) keys = list(request.key) self.assertEqual(len(keys), 1) - self.assertEqual(keys[0], key_pb) + _compare_key_pb_after_request(self, key_pb, keys[0]) def test_lookup_single_key_empty_response_w_eventual(self): from gcloud.datastore.connection import datastore_pb @@ -261,7 +261,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): request.ParseFromString(cw['body']) keys = list(request.key) self.assertEqual(len(keys), 1) - self.assertEqual(keys[0], key_pb) + _compare_key_pb_after_request(self, key_pb, keys[0]) self.assertEqual(request.read_options.read_consistency, datastore_pb.ReadOptions.EVENTUAL) self.assertEqual(request.read_options.transaction, '') @@ -301,7 +301,7 @@ def test_lookup_single_key_empty_response_w_transaction(self): request.ParseFromString(cw['body']) keys = list(request.key) self.assertEqual(len(keys), 1) - self.assertEqual(keys[0], key_pb) + 
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 888a95392629..2e305476b63a 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -235,7 +235,7 @@ def test_lookup_single_key_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_single_key_empty_response_w_eventual(self):
         from gcloud.datastore.connection import datastore_pb
@@ -261,7 +261,7 @@ def test_lookup_single_key_empty_response_w_eventual(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.read_consistency,
                          datastore_pb.ReadOptions.EVENTUAL)
         self.assertEqual(request.read_options.transaction, '')
@@ -301,7 +301,7 @@ def test_lookup_single_key_empty_response_w_transaction(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.transaction, TRANSACTION)
 
     def test_lookup_single_key_nonempty_response(self):
@@ -333,7 +333,7 @@ def test_lookup_single_key_nonempty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_multiple_keys_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
@@ -360,8 +360,8 @@ def test_lookup_multiple_keys_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing(self):
         from gcloud.datastore.connection import datastore_pb
@@ -396,8 +396,8 @@ def test_lookup_multiple_keys_w_missing(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -444,8 +444,8 @@ def test_lookup_multiple_keys_w_deferred(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_deferred_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -500,8 +500,8 @@ def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self):
         request.ParseFromString(cw[0]['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
         self._verifyProtobufCall(cw[1], URI, conn)
         request.ParseFromString(cw[1]['body'])
@@ -907,7 +907,9 @@ def test_allocate_ids_non_empty(self):
         rq_class = datastore_pb.AllocateIdsRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
-        self.assertEqual(list(request.key), before_key_pbs)
+        self.assertEqual(len(request.key), len(before_key_pbs))
+        for key_before, key_after in zip(before_key_pbs, request.key):
+            _compare_key_pb_after_request(self, key_before, key_after)
 
     def test_save_entity_wo_transaction_w_upsert(self):
         from gcloud.datastore.connection import datastore_pb
@@ -938,7 +940,7 @@ def test_save_entity_wo_transaction_w_upsert(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = list(upsert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -979,7 +981,7 @@ def test_save_entity_w_exclude_from_indexes(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = sorted(upsert.property,
                        key=operator.attrgetter('name'),
                        reverse=True)
@@ -1028,7 +1030,7 @@ def test_save_entity_wo_transaction_w_auto_id(self):
         mutation = request.mutation
         inserts = list(mutation.insert_auto_id)
         insert = inserts[0]
-        self.assertEqual(insert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, insert.key)
         props = list(insert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -1112,7 +1114,7 @@ def test_delete_entities_wo_transaction(self):
         deletes = list(mutation.delete)
         self.assertEqual(len(deletes), 1)
         delete = deletes[0]
-        self.assertEqual(delete, key_pb)
+        _compare_key_pb_after_request(self, key_pb, delete)
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
 
     def test_delete_entities_w_transaction(self):
@@ -1168,3 +1170,13 @@ def __init__(self, id):
 
     def id(self):
         return self._id
+
+
+def _compare_key_pb_after_request(test, key_before, key_after):
+    test.assertFalse(key_after.partition_id.HasField('dataset_id'))
+    test.assertEqual(key_before.partition_id.namespace,
+                     key_after.partition_id.namespace)
+    test.assertEqual(len(key_before.path_element),
+                     len(key_after.path_element))
+    for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
+        test.assertEqual(elt1, elt2)
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
index d8b83183a72e..cec7b70496bf 100644
--- a/gcloud/datastore/test_dataset.py
+++ b/gcloud/datastore/test_dataset.py
@@ -109,6 +109,15 @@ def test_get_entity_hit(self):
         self.assertEqual(list(result), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
+    def test_get_entity_bad_type(self):
+        DATASET_ID = 'DATASET'
+        connection = _Connection()
+        dataset = self._makeOne(DATASET_ID, connection)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity(None)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity([])
+
     def test_get_entities_miss(self):
         from gcloud.datastore.key import Key
         DATASET_ID = 'DATASET'
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index e4ae5575a587..8fe9620f5574 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -61,7 +61,7 @@ def test_key_getter(self):
         entity = self._makeOne()
         key = entity.key()
         self.assertIsInstance(key, Key)
-        self.assertEqual(key.dataset_id, None)
+        self.assertEqual(key.dataset_id, entity.dataset().id())
         self.assertEqual(key.kind, _KIND)
 
     def test_key_setter(self):
@@ -196,7 +196,7 @@ def test_save_w_returned_key_exclude_from_indexes(self):
         connection = _Connection()
         connection._save_result = (True, _ID)
         dataset = _Dataset(connection)
-        key = Key('KIND', dataset_id='DATASET')
+        key = Key('KIND', dataset_id=_DATASET_ID)
         entity = self._makeOne(dataset, exclude_from_indexes=['foo'])
         entity.key(key)
         entity['foo'] = 'Foo'
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index dcf36d796cc7..2e92295c4cbb 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -90,10 +90,11 @@ def _callFUT(self, val):
 
         return key_from_protobuf(val)
 
-    def _makePB(self, dataset_id, namespace=None, path=()):
+    def _makePB(self, dataset_id=None, namespace=None, path=()):
         from gcloud.datastore.datastore_v1_pb2 import Key
         pb = Key()
-        pb.partition_id.dataset_id = dataset_id
+        if dataset_id is not None:
+            pb.partition_id.dataset_id = dataset_id
         if namespace is not None:
             pb.partition_id.namespace = namespace
         for elem in path:
@@ -131,6 +132,10 @@ def test_w_path_in_pb(self):
         key = self._callFUT(pb)
         self.assertEqual(key.path, _PATH)
 
+    def test_w_nothing_in_pb(self):
+        pb = self._makePB()
+        self.assertRaises(ValueError, self._callFUT, pb)
+
 
 class Test__pb_attr_value(unittest2.TestCase):
 
@@ -465,3 +470,28 @@ def test_list(self):
         self.assertEqual(marshalled[0].string_value, values[0])
         self.assertEqual(marshalled[1].integer_value, values[1])
         self.assertEqual(marshalled[2].double_value, values[2])
+
+
+class Test__prepare_key_for_request(unittest2.TestCase):
+
+    def _callFUT(self, key_pb):
+        from gcloud.datastore.helpers import _prepare_key_for_request
+
+        return _prepare_key_for_request(key_pb)
+
+    def test_prepare_dataset_valid(self):
+        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
+        key = datastore_pb.Key()
+        key.partition_id.dataset_id = 'foo'
+        new_key = self._callFUT(key)
+        self.assertFalse(new_key is key)
+
+        key_without = datastore_pb.Key()
+        new_key.ClearField('partition_id')
+        self.assertEqual(new_key, key_without)
+
+    def test_prepare_dataset_unset(self):
+        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
+        key = datastore_pb.Key()
+        new_key = self._callFUT(key)
+        self.assertTrue(new_key is key)
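
As `test_w_nothing_in_pb` above implies, a key protobuf without a dataset ID
can no longer round-trip through `key_from_protobuf`. A sketch, assuming no
implicit dataset is configured in the environment:

    from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB
    from gcloud.datastore.helpers import key_from_protobuf

    pb = KeyPB()
    pb.path_element.add().kind = 'Thing'
    try:
        key_from_protobuf(pb)
    except ValueError:
        # No dataset_id on the protobuf and none implied from the
        # environment, so Key's validation raises.
        pass
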
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index c9309a420d6b..3db7d241aca2 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -17,8 +17,15 @@
 
 class TestKey(unittest2.TestCase):
 
+    def setUp(self):
+        self._DEFAULT_DATASET = 'DATASET'
+
     def _getTargetClass(self):
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
+
+        _implicit_environ.DATASET = Dataset(self._DEFAULT_DATASET)
         return Key
 
     def _makeOne(self, *args, **kwargs):
@@ -27,14 +34,22 @@ def _makeOne(self, *args, **kwargs):
     def test_ctor_empty(self):
         self.assertRaises(ValueError, self._makeOne)
 
+    def test_ctor_no_dataset(self):
+        from gcloud._testing import _Monkey
+        from gcloud.datastore import _implicit_environ
+        klass = self._getTargetClass()
+        with _Monkey(_implicit_environ, DATASET=None):
+            self.assertRaises(ValueError, klass, 'KIND')
+
     def test_ctor_explicit(self):
-        _DATASET = 'DATASET'
+        _DATASET = 'DATASET-ALT'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _ID = 1234
         _PATH = [{'kind': _KIND, 'id': _ID}]
         key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE,
                             dataset_id=_DATASET)
+        self.assertNotEqual(_DATASET, self._DEFAULT_DATASET)
         self.assertEqual(key.dataset_id, _DATASET)
         self.assertEqual(key.namespace, _NAMESPACE)
         self.assertEqual(key.kind, _KIND)
@@ -49,7 +64,7 @@ def test_ctor_bad_id_or_name(self):
         self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None)
 
     def test__clone(self):
-        _DATASET = 'DATASET'
+        _DATASET = 'DATASET-ALT'
         _NAMESPACE = 'NAMESPACE'
         _KIND = 'KIND'
         _ID = 1234
@@ -94,8 +109,7 @@ def test_to_protobuf_defaults(self):
         self.assertTrue(isinstance(pb, KeyPB))
 
         # Check partition ID.
-        self.assertEqual(pb.partition_id.dataset_id, '')
-        self.assertFalse(pb.partition_id.HasField('dataset_id'))
+        self.assertEqual(pb.partition_id.dataset_id, self._DEFAULT_DATASET)
         self.assertEqual(pb.partition_id.namespace, '')
         self.assertFalse(pb.partition_id.HasField('namespace'))
 
@@ -108,7 +122,7 @@ def test_to_protobuf_defaults(self):
         self.assertFalse(elem.HasField('id'))
 
     def test_to_protobuf_w_explicit_dataset(self):
-        _DATASET = 'DATASET'
+        _DATASET = 'DATASET-ALT'
         key = self._makeOne('KIND', dataset_id=_DATASET)
         pb = key.to_protobuf()
         self.assertEqual(pb.partition_id.dataset_id, _DATASET)
@@ -139,7 +153,6 @@ def test_to_protobuf_w_no_kind(self):
         # on this? The backend certainly will.
         key._path[-1].pop('kind')
         pb = key.to_protobuf()
-        self.assertEqual(pb.partition_id.dataset_id, '')
         self.assertFalse(pb.path_element[0].HasField('kind'))
 
     def test_is_partial_no_name_or_id(self):
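
The constructor behavior these tests exercise, gathered in one place
(dataset IDs are placeholders; `_Monkey` is the test utility used above to
swap the implicit environment):

    from gcloud._testing import _Monkey
    from gcloud.datastore import _implicit_environ
    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.key import Key

    # An explicit dataset ID always wins.
    key = Key('Kind', 1234, dataset_id='explicit-id')
    assert key.dataset_id == 'explicit-id'

    # Otherwise the ID is implied from the environment...
    with _Monkey(_implicit_environ, DATASET=Dataset('implied-id')):
        assert Key('Kind', 1234).dataset_id == 'implied-id'

    # ...and with no environment to fall back on, Key refuses to build.
    with _Monkey(_implicit_environ, DATASET=None):
        try:
            Key('Kind', 1234)
        except ValueError:
            pass
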
diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py
index 2bcacef3e93a..1e964b0dcdbc 100644
--- a/gcloud/datastore/test_query.py
+++ b/gcloud/datastore/test_query.py
@@ -168,6 +168,31 @@ def test_filter_w_whitespace_property_name(self):
         self.assertEqual(p_pb.value.string_value, u'John')
         self.assertEqual(p_pb.operator, datastore_pb.PropertyFilter.EQUAL)
 
+    def test_filter___key__valid_key(self):
+        from gcloud.datastore.key import Key
+        from gcloud.datastore import test_connection
+
+        query = self._makeOne()
+        key = Key('Foo', dataset_id='DATASET')
+        new_query = query.filter('__key__', '=', key)
+
+        query_pb = new_query._pb
+        all_filters = query_pb.filter.composite_filter.filter
+        self.assertEqual(len(all_filters), 1)
+
+        prop_filter = all_filters[0].property_filter
+        value_fields = prop_filter.value._fields
+        self.assertEqual(len(value_fields), 1)
+        field_name, field_value = value_fields.popitem()
+        self.assertEqual(field_name.name, 'key_value')
+
+        test_connection._compare_key_pb_after_request(
+            self, key.to_protobuf(), field_value)
+
+    def test_filter___key__invalid_value(self):
+        query = self._makeOne()
+        self.assertRaises(TypeError, query.filter, '__key__', '=', None)
+
     def test_ancestor_w_non_key(self):
         query = self._makeOne()
         self.assertRaises(TypeError, query.ancestor, object())
@@ -175,10 +200,10 @@ def test_ancestor_w_non_key(self):
 
     def test_ancestor_wo_existing_ancestor_query_w_key_and_propfilter(self):
         from gcloud.datastore.key import Key
-        _KIND = 'KIND'
-        _ID = 123
+        from gcloud.datastore import test_connection
+
         _NAME = u'NAME'
-        key = Key(_KIND, _ID)
+        key = Key('KIND', 123, dataset_id='DATASET')
         query = self._makeOne().filter('name', '=', _NAME)
         after = query.ancestor(key)
         self.assertFalse(after is query)
@@ -191,13 +216,14 @@ def test_ancestor_wo_existing_ancestor_query_w_key_and_propfilter(self):
         self.assertEqual(p_pb.value.string_value, _NAME)
         p_pb = f_pb.property_filter
         self.assertEqual(p_pb.property.name, '__key__')
-        self.assertEqual(p_pb.value.key_value, key.to_protobuf())
+        test_connection._compare_key_pb_after_request(
+            self, key.to_protobuf(), p_pb.value.key_value)
 
     def test_ancestor_wo_existing_ancestor_query_w_key(self):
         from gcloud.datastore.key import Key
-        _KIND = 'KIND'
-        _ID = 123
-        key = Key(_KIND, _ID)
+        from gcloud.datastore import test_connection
+
+        key = Key('KIND', 123, dataset_id='DATASET')
         query = self._makeOne()
         after = query.ancestor(key)
         self.assertFalse(after is query)
@@ -207,14 +233,22 @@ def test_ancestor_wo_existing_ancestor_query_w_key(self):
         f_pb, = list(q_pb.filter.composite_filter.filter)
         p_pb = f_pb.property_filter
         self.assertEqual(p_pb.property.name, '__key__')
-        self.assertEqual(p_pb.value.key_value, key.to_protobuf())
+        test_connection._compare_key_pb_after_request(
+            self, key.to_protobuf(), p_pb.value.key_value)
 
     def test_ancestor_clears_existing_ancestor_query_w_only(self):
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
+
         _KIND = 'KIND'
         _ID = 123
-        key = Key(_KIND, _ID)
         query = self._makeOne()
+
+        # All keys will have dataset attached.
+        _implicit_environ.DATASET = Dataset('DATASET')
+
+        key = Key(_KIND, _ID)
         between = query.ancestor(key)
         after = between.ancestor(None)
         self.assertFalse(after is query)
@@ -223,12 +257,19 @@ def test_ancestor_clears_existing_ancestor_query_w_only(self):
         self.assertEqual(list(q_pb.filter.composite_filter.filter), [])
 
     def test_ancestor_clears_existing_ancestor_query_w_others(self):
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
+
         _KIND = 'KIND'
         _ID = 123
         _NAME = u'NAME'
-        key = Key(_KIND, _ID)
         query = self._makeOne().filter('name', '=', _NAME)
+
+        # All keys will have dataset attached.
+        _implicit_environ.DATASET = Dataset('DATASET')
+
+        key = Key(_KIND, _ID)
         between = query.ancestor(key)
         after = between.ancestor(None)
         self.assertFalse(after is query)
@@ -314,6 +355,7 @@ def _fetch_page_helper(self, cursor=b'\x00', limit=None,
         _ID = 123
         _NAMESPACE = 'NAMESPACE'
         entity_pb = Entity()
+        entity_pb.key.partition_id.dataset_id = _DATASET
         path_element = entity_pb.key.path_element.add()
         path_element.kind = _KIND
         path_element.id = _ID
diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py
index 602aca25d9f6..b48b51208331 100644
--- a/gcloud/datastore/test_transaction.py
+++ b/gcloud/datastore/test_transaction.py
@@ -94,7 +94,8 @@ def test_commit_w_auto_ids(self):
         _KIND = 'KIND'
         _ID = 123
         connection = _Connection(234)
-        connection._commit_result = _CommitResult(_makeKey(_KIND, _ID))
+        connection._commit_result = _CommitResult(
+            _make_key(_KIND, _ID, _DATASET))
         dataset = _Dataset(_DATASET, connection)
         xact = self._makeOne(dataset)
         entity = _Entity()
@@ -157,10 +158,11 @@ class Foo(Exception):
         self.assertEqual(xact.id(), None)
 
 
-def _makeKey(kind, id):
+def _make_key(kind, id, dataset_id):
     from gcloud.datastore.datastore_v1_pb2 import Key
 
     key = Key()
+    key.partition_id.dataset_id = dataset_id
     elem = key.path_element.add()
     elem.kind = kind
     elem.id = id
@@ -211,20 +213,13 @@ def __init__(self, *new_keys):
         self.insert_auto_id_key = new_keys
 
 
-class _Key(object):
-    _path = None
-
-    def _clone(self):
-        return _Key()
-
-
 class _Entity(object):
-    _marker = object()
 
     def __init__(self):
-        self._key = _Key()
+        from gcloud.datastore.key import Key
+        self._key = Key('KIND', dataset_id='DATASET')
 
-    def key(self, key=_marker):
-        if key is self._marker:
-            return self._key
-        self._key = key
+    def key(self, key=None):
+        if key is not None:
+            self._key = key
+        return self._key
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index 3c9473bdd91f..663e6acbb2bb 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -16,7 +16,6 @@
 
 from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-from gcloud.datastore import helpers
 
 
 class Transaction(_implicit_environ._DatastoreBase):
@@ -234,11 +233,8 @@ def commit(self):
         # For any of the auto-id entities, make sure we update their keys.
         for i, entity in enumerate(self._auto_id_entities):
             key_pb = result.insert_auto_id_key[i]
-            key = helpers.key_from_protobuf(key_pb)
-            # This is a temporary hack. Will be addressed in 451 #6.
-            new_key = entity.key()._clone()
-            new_key._path = key.path
-            entity.key(key)
+            new_id = key_pb.path_element[-1].id
+            entity.key(entity.key().completed_key(new_id))
 
         # Tell the connection that the transaction is over.
         self.connection().transaction(None)
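
The new commit() logic relies on `Key.completed_key` (added earlier in the
#451 series) rather than mutating a clone by hand. Roughly, assuming a
partial key and a backend-assigned ID of 1234:

    from gcloud.datastore.key import Key

    partial = Key('Thing', dataset_id='my-dataset-id')  # no ID yet
    assert partial.is_partial

    finished = partial.completed_key(1234)
    assert finished.path == [{'kind': 'Thing', 'id': 1234}]
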
diff --git a/regression/populate_datastore.py b/regression/populate_datastore.py
index cb5298ff5f17..c773fd70b40f 100644
--- a/regression/populate_datastore.py
+++ b/regression/populate_datastore.py
@@ -87,7 +87,7 @@ def add_characters():
         if key_path[-1] != character['name']:
             raise ValueError(('Character and key don\'t agree',
                               key_path, character))
-        key = datastore.key.Key(*key_path)
+        key = datastore.key.Key(*key_path, dataset_id=dataset.id())
         entity = datastore.entity.Entity(dataset=dataset).key(key)
         entity.update(character)
         entity.save()
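
For reference, the regression script builds each key from a flat ancestor
path plus the now-required dataset ID. A sketch with placeholder values
(the names are illustrative, not taken from the script's data):

    from gcloud.datastore.key import Key

    key_path = ['Character', 'eddard', 'Character', 'arya']  # illustrative
    key = Key(*key_path, dataset_id='my-dataset-id')
    # -> a two-element path: a 'Character' named 'eddard' with a
    #    'Character' child named 'arya'.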