Address seventh part of #451: Make dataset_id required on Key. #463

Merged: 2 commits, Dec 31, 2014
gcloud/datastore/connection.py (12 changes: 4 additions & 8 deletions)
@@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs,
         if single_key:
             key_pbs = [key_pbs]
 
-        for key_pb in key_pbs:
-            lookup_request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(lookup_request.key, key_pbs)
 
         results, missing_found, deferred_found = self._lookup(
             lookup_request, dataset_id, deferred is not None)
@@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs):
         :returns: An equal number of keys, with IDs filled in by the backend.
         """
         request = datastore_pb.AllocateIdsRequest()
-        for key_pb in key_pbs:
-            request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(request.key, key_pbs)
         # Nothing to do with this response, so just execute the method.
         response = self._rpc(dataset_id, 'allocateIds', request,
                              datastore_pb.AllocateIdsResponse)
@@ -451,6 +449,7 @@ def save_entity(self, dataset_id, key_pb, properties,
                   either `None` or an integer that has been assigned.
         """
         mutation = self.mutation()
+        key_pb = helpers._prepare_key_for_request(key_pb)
 
         # If the Key is complete, we should upsert
         # instead of using insert_auto_id.
@@ -515,10 +514,7 @@ def delete_entities(self, dataset_id, key_pbs):
         :returns: True
         """
         mutation = self.mutation()
-
-        for key_pb in key_pbs:
-            delete = mutation.delete.add()
-            delete.CopyFrom(key_pb)
+        helpers._add_keys_to_request(mutation.delete, key_pbs)
 
         if not self.transaction():
            self.commit(dataset_id, mutation)
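
All three call sites above now funnel key copying through a single helper, `helpers._add_keys_to_request`, which also normalizes each key before it goes on the wire. A minimal sketch of the resulting pattern, using the `datastore_v1_pb2` messages from this diff; the `_make_key_pb` scaffolding is illustrative, not part of the change:

```python
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers


def _make_key_pb(kind, id_):
    # Illustrative only: build a bare Key protobuf to copy into a request.
    key_pb = datastore_pb.Key()
    elem = key_pb.path_element.add()
    elem.kind = kind
    elem.id = id_
    return key_pb


request = datastore_pb.AllocateIdsRequest()
# One call replaces the per-site ``for key_pb in key_pbs: ...CopyFrom`` loops;
# each key is first normalized by helpers._prepare_key_for_request().
helpers._add_keys_to_request(request.key, [_make_key_pb('Person', 1),
                                           _make_key_pb('Person', 2)])
assert len(request.key) == 2
```
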
gcloud/datastore/entity.py (2 changes: 1 addition & 1 deletion)
@@ -99,7 +99,7 @@ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
         #       _implicit_environ._DatastoreBase to avoid split MRO.
         self._dataset = dataset or _implicit_environ.DATASET
         if kind:
-            self._key = Key(kind)
+            self._key = Key(kind, dataset_id=self.dataset().id())
         else:
             self._key = None
         self._exclude_from_indexes = set(exclude_from_indexes)
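
Because every Key now carries a dataset ID, the Entity constructor threads its dataset's ID into the implicit key it builds. A brief sketch of the observable behavior, assuming `dataset` is an already-configured `gcloud.datastore` Dataset instance:

```python
from gcloud.datastore.entity import Entity

# 'dataset' is assumed to be a configured gcloud.datastore Dataset.
entity = Entity(dataset=dataset, kind='Person')
key = entity.key()
# The implicit key inherits the dataset ID instead of being created bare.
assert key.dataset_id == entity.dataset().id()
```
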
gcloud/datastore/helpers.py (42 changes: 42 additions & 0 deletions)
@@ -25,6 +25,7 @@
 import pytz
 import six
 
+from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 from gcloud.datastore.entity import Entity
 from gcloud.datastore.key import Key
@@ -259,3 +260,44 @@ def _set_protobuf_value(value_pb, val):
             _set_protobuf_value(i_pb, item)
     else:  # scalar, just assign
         setattr(value_pb, attr, val)
+
+
+def _prepare_key_for_request(key_pb):
+    """Strip the dataset ID from a key protobuf before a request.
+
+    :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pb: A key to be added to a request.
+
+    :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :returns: A key which will be added to a request. It will be the
+              original if nothing needs to be changed.
+    """
+    if key_pb.partition_id.HasField('dataset_id'):
+        # We remove the dataset_id from the protobuf. This is because
+        # the backend fails a request if the key contains an un-prefixed
+        # dataset ID. The backend fails because requests to
+        #       /datastore/.../datasets/foo/...
+        # and
+        #       /datastore/.../datasets/s~foo/...
+        # both go to the datastore given by 's~foo'. So if the key
+        # protobuf in the request body has dataset_id='foo', the
+        # backend will reject it, since 'foo' != 's~foo'.
+        new_key_pb = datastore_pb.Key()
+        new_key_pb.CopyFrom(key_pb)
+        new_key_pb.partition_id.ClearField('dataset_id')
+        key_pb = new_key_pb
+    return key_pb
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+    """Add protobuf keys to a request object.
+
+    :type request_field_pb: `RepeatedCompositeFieldContainer`
+    :param request_field_pb: A repeated proto field that contains keys.
+
+    :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pbs: The keys to add to a request.
+    """
+    for key_pb in key_pbs:
+        key_pb = _prepare_key_for_request(key_pb)
+        request_field_pb.add().CopyFrom(key_pb)
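
To make the prefixed-ID problem concrete, here is a hedged sketch of the new helper's effect on a key built by application code (field names follow `datastore_v1_pb2` as used throughout this diff):

```python
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers

key_pb = datastore_pb.Key()
key_pb.partition_id.dataset_id = 'foo'  # un-prefixed; backend knows 's~foo'
elem = key_pb.path_element.add()
elem.kind = 'Person'
elem.id = 1234

cleaned = helpers._prepare_key_for_request(key_pb)
# The copy sent over the wire has no dataset_id at all ...
assert not cleaned.partition_id.HasField('dataset_id')
# ... while the caller's original protobuf is left untouched.
assert key_pb.partition_id.dataset_id == 'foo'
```
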
gcloud/datastore/key.py (38 changes: 22 additions & 16 deletions)
@@ -18,6 +18,7 @@
 from itertools import izip
 import six
 
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
@@ -56,24 +57,31 @@ def __init__(self, *path_args, **kwargs):
                            passed as a keyword argument.
 
         :type dataset_id: string
-        :param dataset_id: The dataset ID associated with the key. Can only be
-                           passed as a keyword argument.
-
-        # This note will be obsolete by the end of #451.
-
-        .. note::
-          The key's ``_dataset_id`` field must be None for keys created
-          by application code. The
-          :func:`gcloud.datastore.helpers.key_from_protobuf` factory
-          will be set the field to an appropriate value for keys
-          returned from the datastore backend. The application
-          **must** treat any value set by the back-end as opaque.
+        :param dataset_id: The dataset ID associated with the key. Required,
+                           unless the implicit dataset ID has been set. Can
+                           only be passed as a keyword argument.
         """
         self._path = self._parse_path(path_args)
         self._flat_path = path_args
         self._parent = None
         self._namespace = kwargs.get('namespace')
         self._dataset_id = kwargs.get('dataset_id')
+        self._validate_dataset_id()
+
+    def _validate_dataset_id(self):
+        """Ensures the dataset ID is set.
+
+        If unset, attempts to imply the ID from the environment.
+
+        :raises: `ValueError` if there is no `dataset_id` and none
+                 can be implied.
+        """
+        if self._dataset_id is None:
+            if _implicit_environ.DATASET is not None:
+                # This assumes DATASET.id() is not None.
+                self._dataset_id = _implicit_environ.DATASET.id()
+            else:
+                raise ValueError('A Key must have a dataset ID set.')
 
     @staticmethod
     def _parse_path(path_args):
@@ -160,9 +168,7 @@ def to_protobuf(self):
         :returns: The Protobuf representing the key.
         """
         key = datastore_pb.Key()
-
-        if self.dataset_id is not None:
-            key.partition_id.dataset_id = self.dataset_id
+        key.partition_id.dataset_id = self.dataset_id
 
         if self.namespace:
             key.partition_id.namespace = self.namespace
@@ -297,4 +303,4 @@ def parent(self):
         return self._parent
 
     def __repr__(self):
-        return '<Key%s>' % self.path
+        return '<Key%s, dataset=%s>' % (self.path, self.dataset_id)
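
With `_validate_dataset_id` in place, a Key resolves its dataset ID at construction time instead of at serialization time. A short usage sketch; `'my-dataset'` is a placeholder ID:

```python
from gcloud.datastore.key import Key

key = Key('Person', 1234, dataset_id='my-dataset')
key_pb = key.to_protobuf()
# to_protobuf() no longer needs a None check; the ID is always present.
assert key_pb.partition_id.dataset_id == 'my-dataset'

# Without an explicit ID, and with no implicit DATASET configured in
# _implicit_environ, construction now fails fast:
try:
    Key('Person', 1234)
except ValueError as err:
    print(err)  # A Key must have a dataset ID set.
```
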
gcloud/datastore/query.py (12 changes: 10 additions & 2 deletions)
@@ -163,7 +163,14 @@ def filter(self, property_name, operator, value):
         property_filter.operator = pb_op_enum
 
         # Set the value to filter on based on the type.
-        helpers._set_protobuf_value(property_filter.value, value)
+        if property_name == '__key__':
+            if not isinstance(value, Key):
+                raise TypeError('__key__ query requires a Key instance.')
+            key_pb = value.to_protobuf()
+            property_filter.value.key_value.CopyFrom(
+                helpers._prepare_key_for_request(key_pb))
+        else:
+            helpers._set_protobuf_value(property_filter.value, value)
         return clone
 
     def ancestor(self, ancestor):
@@ -216,7 +223,8 @@ def ancestor(self, ancestor):
         ancestor_filter = composite_filter.filter.add().property_filter
         ancestor_filter.property.name = '__key__'
         ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR
-        ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf())
+        ancestor_pb = helpers._prepare_key_for_request(ancestor.to_protobuf())
+        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
 
         return clone
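
The new `__key__` branch lets filters accept a `Key` object directly and strips its dataset ID before serialization, mirroring the ancestor filter above. A usage sketch against the pre-1.0 API assumed by this diff, where `dataset` is a configured Dataset:

```python
from gcloud.datastore.key import Key

# 'dataset' is assumed to be a configured gcloud.datastore Dataset.
key = Key('Person', 1234, dataset_id='my-dataset')
query = dataset.query('Person').filter('__key__', '=', key)

# Anything other than a Key instance now fails fast:
# dataset.query('Person').filter('__key__', '=', 'Person/1234')  # TypeError
```
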
gcloud/datastore/test_connection.py (46 changes: 29 additions & 17 deletions)
@@ -235,7 +235,7 @@ def test_lookup_single_key_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_single_key_empty_response_w_eventual(self):
         from gcloud.datastore.connection import datastore_pb
@@ -261,7 +261,7 @@ def test_lookup_single_key_empty_response_w_eventual(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.read_consistency,
                          datastore_pb.ReadOptions.EVENTUAL)
         self.assertEqual(request.read_options.transaction, '')
@@ -301,7 +301,7 @@ def test_lookup_single_key_empty_response_w_transaction(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.transaction, TRANSACTION)
 
     def test_lookup_single_key_nonempty_response(self):
@@ -333,7 +333,7 @@ def test_lookup_single_key_nonempty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_multiple_keys_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
@@ -360,8 +360,8 @@ def test_lookup_multiple_keys_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing(self):
         from gcloud.datastore.connection import datastore_pb
@@ -396,8 +396,8 @@ def test_lookup_multiple_keys_w_missing(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -444,8 +444,8 @@ def test_lookup_multiple_keys_w_deferred(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_deferred_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -500,8 +500,8 @@ def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self):
         request.ParseFromString(cw[0]['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
         self._verifyProtobufCall(cw[1], URI, conn)
         request.ParseFromString(cw[1]['body'])
@@ -907,7 +907,9 @@ def test_allocate_ids_non_empty(self):
         rq_class = datastore_pb.AllocateIdsRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
-        self.assertEqual(list(request.key), before_key_pbs)
+        self.assertEqual(len(request.key), len(before_key_pbs))
+        for key_before, key_after in zip(before_key_pbs, request.key):
+            _compare_key_pb_after_request(self, key_before, key_after)
 
     def test_save_entity_wo_transaction_w_upsert(self):
         from gcloud.datastore.connection import datastore_pb
@@ -938,7 +940,7 @@ def test_save_entity_wo_transaction_w_upsert(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = list(upsert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -979,7 +981,7 @@ def test_save_entity_w_exclude_from_indexes(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = sorted(upsert.property,
                        key=operator.attrgetter('name'),
                        reverse=True)
@@ -1028,7 +1030,7 @@ def test_save_entity_wo_transaction_w_auto_id(self):
         mutation = request.mutation
         inserts = list(mutation.insert_auto_id)
         insert = inserts[0]
-        self.assertEqual(insert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, insert.key)
         props = list(insert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -1112,7 +1114,7 @@ def test_delete_entities_wo_transaction(self):
         deletes = list(mutation.delete)
         self.assertEqual(len(deletes), 1)
         delete = deletes[0]
-        self.assertEqual(delete, key_pb)
+        _compare_key_pb_after_request(self, key_pb, delete)
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
 
     def test_delete_entities_w_transaction(self):
@@ -1168,3 +1170,13 @@ def __init__(self, id):
 
     def id(self):
         return self._id
+
+
+def _compare_key_pb_after_request(test, key_before, key_after):
+    test.assertFalse(key_after.partition_id.HasField('dataset_id'))
+    test.assertEqual(key_before.partition_id.namespace,
+                     key_after.partition_id.namespace)
+    test.assertEqual(len(key_before.path_element),
+                     len(key_after.path_element))
+    for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
+        test.assertEqual(elt1, elt2)
gcloud/datastore/test_dataset.py (9 changes: 9 additions & 0 deletions)
@@ -109,6 +109,15 @@ def test_get_entity_hit(self):
         self.assertEqual(list(result), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
+    def test_get_entity_bad_type(self):
+        DATASET_ID = 'DATASET'
+        connection = _Connection()
+        dataset = self._makeOne(DATASET_ID, connection)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity(None)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity([])
+
     def test_get_entities_miss(self):
         from gcloud.datastore.key import Key
         DATASET_ID = 'DATASET'
gcloud/datastore/test_entity.py (4 changes: 2 additions & 2 deletions)
@@ -61,7 +61,7 @@ def test_key_getter(self):
         entity = self._makeOne()
         key = entity.key()
         self.assertIsInstance(key, Key)
-        self.assertEqual(key.dataset_id, None)
+        self.assertEqual(key.dataset_id, entity.dataset().id())
         self.assertEqual(key.kind, _KIND)
 
     def test_key_setter(self):
@@ -196,7 +196,7 @@ def test_save_w_returned_key_exclude_from_indexes(self):
         connection = _Connection()
         connection._save_result = (True, _ID)
         dataset = _Dataset(connection)
-        key = Key('KIND', dataset_id='DATASET')
+        key = Key('KIND', dataset_id=_DATASET_ID)
         entity = self._makeOne(dataset, exclude_from_indexes=['foo'])
         entity.key(key)
         entity['foo'] = 'Foo'