Skip to content

Commit

Permalink
Removing Key.compare_to_proto and using just the saved ID.
Browse files Browse the repository at this point in the history
Making Connection.save_entity return the auto-allocated ID
instead of the entire PB.
  • Loading branch information
dhermes committed Dec 31, 2014
1 parent cc59310 commit a257a9c
Show file tree
Hide file tree
Showing 6 changed files with 24 additions and 183 deletions.
17 changes: 13 additions & 4 deletions gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,6 +444,11 @@ def save_entity(self, dataset_id, key_pb, properties,
:type exclude_from_indexes: sequence of str
:param exclude_from_indexes: Names of properties *not* to be indexed.
:rtype: :class:`tuple`
:returns: The pair (`assigned`, `new_id`) where `assigned` is a boolean
indicating if a new ID has been assigned and `new_id` is
either `None` or an integer that has been assigned.
"""
mutation = self.mutation()

Expand Down Expand Up @@ -477,14 +482,18 @@ def save_entity(self, dataset_id, key_pb, properties,
# If this is in a transaction, we should just return True. The
# transaction will handle assigning any keys as necessary.
if self.transaction():
return True
return False, None

result = self.commit(dataset_id, mutation)
# If this was an auto-assigned ID, return the new Key.
# If this was an auto-assigned ID, return the new Key. We don't
# verify that this matches the original `key_pb` but trust the
# backend to uphold the values sent (e.g. dataset ID).
if auto_id:
return result.insert_auto_id_key[0]
inserted_key_pb = result.insert_auto_id_key[0]
# Assumes the backend has set `id` without checking HasField('id').
return True, inserted_key_pb.path_element[-1].id

return True
return False, None

def delete_entities(self, dataset_id, key_pbs):
"""Delete keys from a dataset in the Cloud Datastore.
Expand Down
7 changes: 3 additions & 4 deletions gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
"""Class for representing a single entity in the Cloud Datastore."""

from gcloud.datastore import _implicit_environ
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key


Expand Down Expand Up @@ -241,7 +240,7 @@ def save(self):
key = self._must_key
dataset = self._must_dataset
connection = dataset.connection()
key_pb = connection.save_entity(
assigned, new_id = connection.save_entity(
dataset_id=dataset.id(),
key_pb=key.to_protobuf(),
properties=dict(self),
Expand All @@ -253,9 +252,9 @@ def save(self):
if transaction and key.is_partial:
transaction.add_auto_id_entity(self)

if isinstance(key_pb, datastore_pb.Key):
if assigned:
# Update the key (which may have been altered).
self.key(self.key().compare_to_proto(key_pb))
self.key(self.key().completed_key(new_id))

return self

Expand Down
86 changes: 0 additions & 86 deletions gcloud/datastore/key.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,92 +153,6 @@ def completed_key(self, id_or_name):
new_key._flat_path += (id_or_name,)
return new_key

def _validate_protobuf_dataset_id(self, protobuf):
"""Checks that dataset ID on protobuf matches current one.
The value of the dataset ID may have changed from unprefixed
(e.g. 'foo') to prefixed (e.g. 's~foo' or 'e~foo').
:type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param protobuf: A protobuf representation of the key. Expected to be
returned after a datastore operation.
:rtype: :class:`str`
"""
proto_dataset_id = protobuf.partition_id.dataset_id
if proto_dataset_id == self.dataset_id:
return

# Since they don't match, we check to see if `proto_dataset_id` has a
# prefix.
unprefixed = None
prefix = proto_dataset_id[:2]
if prefix in ('s~', 'e~'):
unprefixed = proto_dataset_id[2:]

if unprefixed != self.dataset_id:
raise ValueError('Dataset ID on protobuf does not match.',
proto_dataset_id, self.dataset_id)

def compare_to_proto(self, protobuf):
    """Checks current key against a protobuf; updates if partial.

    If the current key is partial, returns a new key that has been
    completed with the ID/name taken from ``protobuf``; otherwise returns
    the current key unchanged.

    The value of the dataset ID may have changed from implicit (i.e. None,
    with the ID implied from the dataset.Dataset object associated with the
    Entity/Key), but if it was implicit before, we leave it as implicit.

    :type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key`
    :param protobuf: A protobuf representation of the key. Expected to be
                     returned after a datastore operation.

    :rtype: :class:`gcloud.datastore.key.Key`
    :returns: The current key if not partial, else a completed copy.
    :raises: `ValueError` if the namespace or dataset ID of `protobuf`
             don't match the current values or if the path from `protobuf`
             doesn't match.
    """
    if self.namespace is None:
        if protobuf.partition_id.HasField('namespace'):
            raise ValueError('Namespace unset on key but set on protobuf.')
    elif protobuf.partition_id.namespace != self.namespace:
        raise ValueError('Namespace on protobuf does not match.',
                         protobuf.partition_id.namespace, self.namespace)

    # Check that dataset IDs match if not implicit.
    if self.dataset_id is not None:
        self._validate_protobuf_dataset_id(protobuf)

    # Rebuild the protobuf path as a list of plain dicts so it can be
    # compared directly against `self.path`.
    path = []
    for element in protobuf.path_element:
        key_part = {}
        for descriptor, value in element._fields.items():
            key_part[descriptor.name] = value
        path.append(key_part)

    if path == self.path:
        return self

    if not self.is_partial:
        raise ValueError('Proto path does not match completed key.',
                         path, self.path)

    # A partial key may differ only by the trailing `id` / `name` the
    # backend assigned; pull it off before re-comparing.
    last_part = path[-1]
    id_or_name = None
    if 'id' in last_part:
        id_or_name = last_part.pop('id')
    elif 'name' in last_part:
        id_or_name = last_part.pop('name')

    # We have edited path by popping from the last part, so check again.
    if path != self.path:
        raise ValueError('Proto path does not match partial key.',
                         path, self.path)

    # BUGFIX: the method defined on Key is `completed_key`, not
    # `complete_key` — the old call would raise AttributeError whenever
    # a partial key was actually completed.
    return self.completed_key(id_or_name)

def to_protobuf(self):
"""Return a protobuf corresponding to the key.
Expand Down
10 changes: 5 additions & 5 deletions gcloud/datastore/test_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -926,7 +926,7 @@ def test_save_entity_wo_transaction_w_upsert(self):
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, True)
self.assertEqual(result, (False, None))
cw = http._called_with
self._verifyProtobufCall(cw, URI, conn)
rq_class = datastore_pb.CommitRequest
Expand Down Expand Up @@ -967,7 +967,7 @@ def test_save_entity_w_exclude_from_indexes(self):
result = conn.save_entity(DATASET_ID, key_pb,
{'foo': u'Foo', 'bar': [u'bar1', u'bar2']},
exclude_from_indexes=['foo', 'bar'])
self.assertEqual(result, True)
self.assertEqual(result, (False, None))
cw = http._called_with
self._verifyProtobufCall(cw, URI, conn)
rq_class = datastore_pb.CommitRequest
Expand Down Expand Up @@ -1018,7 +1018,7 @@ def test_save_entity_wo_transaction_w_auto_id(self):
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, updated_key_pb)
self.assertEqual(result, (True, 1234))
cw = http._called_with
self._verifyProtobufCall(cw, URI, conn)
rq_class = datastore_pb.CommitRequest
Expand Down Expand Up @@ -1054,7 +1054,7 @@ def mutation(self):
conn.transaction(Xact())
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
self.assertEqual(result, True)
self.assertEqual(result, (False, None))
self.assertEqual(http._called_with, None)
mutation = conn.mutation()
self.assertEqual(len(mutation.upsert), 1)
Expand All @@ -1077,7 +1077,7 @@ def mutation(self):
conn.transaction(Xact())
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result = conn.save_entity(DATASET_ID, key_pb, {'foo': nested})
self.assertEqual(result, True)
self.assertEqual(result, (False, None))
self.assertEqual(http._called_with, None)
mutation = conn.mutation()
self.assertEqual(len(mutation.upsert), 1)
Expand Down
6 changes: 3 additions & 3 deletions gcloud/datastore/test_entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def test_save_w_returned_key_exclude_from_indexes(self):
key_pb.partition_id.dataset_id = _DATASET_ID
key_pb.path_element.add(kind=_KIND, id=_ID)
connection = _Connection()
connection._save_result = key_pb
connection._save_result = (True, _ID)
dataset = _Dataset(connection)
key = Key('KIND', dataset_id='DATASET')
entity = self._makeOne(dataset, exclude_from_indexes=['foo'])
Expand Down Expand Up @@ -287,12 +287,12 @@ def get_entities(self, keys):
return [self.get(key) for key in keys]

def allocate_ids(self, incomplete_key, num_ids):
return [incomplete_key.complete_key(i + 1) for i in range(num_ids)]
return [incomplete_key.completed_key(i + 1) for i in range(num_ids)]


class _Connection(object):
_transaction = _saved = _deleted = None
_save_result = True
_save_result = (False, None)

def transaction(self):
return self._transaction
Expand Down
81 changes: 0 additions & 81 deletions gcloud/datastore/test_key.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,87 +86,6 @@ def test_completed_key_on_complete(self):
key = self._makeOne('KIND', 1234)
self.assertRaises(ValueError, key.completed_key, 5678)

def test_compare_to_proto_incomplete_w_id(self):
    # A proto carrying an `id` completes a partial key with that id.
    partial = self._makeOne('KIND')
    key_pb = partial.to_protobuf()
    key_pb.path_element[0].id = 1234
    completed = partial.compare_to_proto(key_pb)
    self.assertFalse(completed is partial)
    self.assertEqual(completed.id, 1234)
    self.assertEqual(completed.name, None)

def test_compare_to_proto_incomplete_w_name(self):
    # A proto carrying a `name` completes a partial key with that name.
    partial = self._makeOne('KIND')
    key_pb = partial.to_protobuf()
    key_pb.path_element[0].name = 'NAME'
    completed = partial.compare_to_proto(key_pb)
    self.assertFalse(completed is partial)
    self.assertEqual(completed.id, None)
    self.assertEqual(completed.name, 'NAME')

def test_compare_to_proto_incomplete_w_incomplete(self):
    # An equally-incomplete proto leaves the partial key untouched.
    partial = self._makeOne('KIND')
    result = partial.compare_to_proto(partial.to_protobuf())
    self.assertTrue(result is partial)

def test_compare_to_proto_incomplete_w_bad_path(self):
    # A kind mismatch anywhere in the path is rejected.
    partial = self._makeOne('KIND1', 1234, 'KIND2')
    key_pb = partial.to_protobuf()
    key_pb.path_element[0].kind = 'NO_KIND'
    self.assertRaises(ValueError, partial.compare_to_proto, key_pb)

def test_compare_to_proto_complete_w_id(self):
    # A complete key must not receive a different id.
    complete = self._makeOne('KIND', 1234)
    key_pb = complete.to_protobuf()
    key_pb.path_element[0].id = 5678
    self.assertRaises(ValueError, complete.compare_to_proto, key_pb)

def test_compare_to_proto_complete_w_name(self):
    # A complete key must not receive a name on top of its id.
    complete = self._makeOne('KIND', 1234)
    key_pb = complete.to_protobuf()
    key_pb.path_element[0].name = 'NAME'
    self.assertRaises(ValueError, complete.compare_to_proto, key_pb)

def test_compare_to_proto_complete_w_incomplete(self):
    # A proto lacking the id cannot match a complete key.
    complete = self._makeOne('KIND', 1234)
    key_pb = complete.to_protobuf()
    key_pb.path_element[0].ClearField('id')
    self.assertRaises(ValueError, complete.compare_to_proto, key_pb)

def test_compare_to_proto_complete_diff_dataset(self):
    # An 's~'-prefixed dataset ID is treated as equivalent to ours.
    key = self._makeOne('KIND', 1234, dataset_id='DATASET')
    key_pb = key.to_protobuf()
    key_pb.partition_id.dataset_id = 's~' + key.dataset_id
    self.assertTrue(key.compare_to_proto(key_pb) is key)

def test_compare_to_proto_complete_bad_dataset(self):
    # Unknown dataset-ID prefixes are rejected.
    key = self._makeOne('KIND', 1234, dataset_id='DATASET')
    key_pb = key.to_protobuf()
    key_pb.partition_id.dataset_id = 'BAD_PRE~' + key.dataset_id
    self.assertRaises(ValueError, key.compare_to_proto, key_pb)

def test_compare_to_proto_complete_valid_namespace(self):
    # Matching namespaces round-trip to the same key object.
    key = self._makeOne('KIND', 1234, namespace='NAMESPACE')
    self.assertTrue(key.compare_to_proto(key.to_protobuf()) is key)

def test_compare_to_proto_complete_namespace_unset_on_pb(self):
    # A proto missing our namespace is rejected.
    key = self._makeOne('KIND', 1234, namespace='NAMESPACE')
    key_pb = key.to_protobuf()
    key_pb.partition_id.ClearField('namespace')
    self.assertRaises(ValueError, key.compare_to_proto, key_pb)

def test_compare_to_proto_complete_namespace_unset_on_key(self):
    # A proto carrying a namespace we never set is rejected.
    key = self._makeOne('KIND', 1234)
    key_pb = key.to_protobuf()
    key_pb.partition_id.namespace = 'NAMESPACE'
    self.assertRaises(ValueError, key.compare_to_proto, key_pb)

def test_to_protobuf_defaults(self):
from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB
_KIND = 'KIND'
Expand Down

2 comments on commit a257a9c

@tseaver
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I like seeing all those deleted lines: it feels like we found a better API.

@dhermes
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Indeed

Please sign in to comment.