diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py
index ee718e87f4b6..1dc202ee8c39 100644
--- a/gcloud/datastore/__init__.py
+++ b/gcloud/datastore/__init__.py
@@ -44,7 +44,10 @@
 which represents a lookup or search over the rows in the datastore.
 """
 
+import os
+
 from gcloud import credentials
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore.connection import Connection
 
 
@@ -52,6 +55,23 @@
                'https://www.googleapis.com/auth/userinfo.email')
 """The scope required for authenticating as a Cloud Datastore consumer."""
 
+_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'
+
+
+def _set_dataset_from_environ():
+    """Determines the dataset from the local environment.
+
+    Currently only supports an environment variable, but will implicitly
+    support App Engine, Compute Engine and other environments in
+    the future.
+
+    Local environment variable used is:
+    - GCLOUD_DATASET_ID
+    """
+    local_dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
+    if local_dataset_id is not None:
+        _implicit_environ.DATASET = get_dataset(local_dataset_id)
+
 
 def get_connection():
     """Shortcut method to establish a connection to the Cloud Datastore.
@@ -97,3 +117,58 @@ def get_dataset(dataset_id):
     """
     connection = get_connection()
     return connection.dataset(dataset_id)
+
+
+def _require_dataset():
+    """Convenience method to ensure DATASET is set.
+
+    :rtype: :class:`gcloud.datastore.dataset.Dataset`
+    :returns: A dataset based on the current environment.
+    :raises: :class:`EnvironmentError` if DATASET is not set.
+    """
+    if _implicit_environ.DATASET is None:
+        raise EnvironmentError('Dataset could not be implied.')
+    return _implicit_environ.DATASET
+
+
+def get_entity(key):
+    """Retrieves entity from implicit dataset, along with its attributes.
+
+    :type key: :class:`gcloud.datastore.key.Key`
+    :param key: The key of the entity to retrieve.
+
+    :rtype: :class:`gcloud.datastore.entity.Entity` or ``None``
+    :return: The requested entity, or ``None`` if there was no match found.
+    """
+    return _require_dataset().get_entity(key)
+
+
+def get_entities(keys):
+    """Retrieves entities from implied dataset, along with their attributes.
+
+    :type keys: list of :class:`gcloud.datastore.key.Key`
+    :param keys: The keys of the entities to retrieve.
+
+    :rtype: list of :class:`gcloud.datastore.entity.Entity`
+    :return: The requested entities.
+    """
+    return _require_dataset().get_entities(keys)
+
+
+def allocate_ids(incomplete_key, num_ids):
+    """Allocates a list of IDs from a partial key.
+
+    :type incomplete_key: :class:`gcloud.datastore.key.Key`
+    :param incomplete_key: The partial key to use as base for allocated IDs.
+
+    :type num_ids: :class:`int`
+    :param num_ids: The number of IDs to allocate.
+
+    :rtype: list of :class:`gcloud.datastore.key.Key`
+    :return: The (complete) keys allocated with `incomplete_key` as root.
+    """
+    return _require_dataset().allocate_ids(incomplete_key, num_ids)
+
+
+# Set DATASET if it can be implied from the environment.
+_set_dataset_from_environ()
diff --git a/gcloud/datastore/_implicit_environ.py b/gcloud/datastore/_implicit_environ.py
new file mode 100644
index 000000000000..088067831820
--- /dev/null
+++ b/gcloud/datastore/_implicit_environ.py
@@ -0,0 +1,24 @@
+"""Module to provide implicit behavior based on environment.
+
+Acts as a mutable namespace to allow the datastore package to
+imply the current dataset from the environment.
+
+Also provides a base class for classes in the `datastore` package
+which could utilize the implicit environment.
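+
+A minimal sketch of the implicit behavior (assumes ``GCLOUD_DATASET_ID``
+was set in the environment before ``gcloud.datastore`` was imported; the
+dataset ID shown is hypothetical):
+
+>>> from gcloud.datastore import _implicit_environ
+>>> _implicit_environ.DATASET.id()  # doctest: +SKIP
+'my-dataset'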
+""" + + +DATASET = None +"""Module global to allow persistent implied dataset from enviroment.""" + + +class _DatastoreBase(object): + """Base for all classes in the datastore package. + + Uses the implicit DATASET object as a default dataset attached + to the instances being created. Stores the dataset passed in + on the protected (i.e. non-public) attribute `_dataset`. + """ + + def __init__(self, dataset=None): + self._dataset = dataset or DATASET diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 9642be52afc1..1b6a6b80c2fd 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs, if single_key: key_pbs = [key_pbs] - for key_pb in key_pbs: - lookup_request.key.add().CopyFrom(key_pb) + helpers._add_keys_to_request(lookup_request.key, key_pbs) results, missing_found, deferred_found = self._lookup( lookup_request, dataset_id, deferred is not None) @@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs): :returns: An equal number of keys, with IDs filled in by the backend. """ request = datastore_pb.AllocateIdsRequest() - for key_pb in key_pbs: - request.key.add().CopyFrom(key_pb) + helpers._add_keys_to_request(request.key, key_pbs) # Nothing to do with this response, so just execute the method. response = self._rpc(dataset_id, 'allocateIds', request, datastore_pb.AllocateIdsResponse) @@ -444,8 +442,14 @@ def save_entity(self, dataset_id, key_pb, properties, :type exclude_from_indexes: sequence of str :param exclude_from_indexes: Names of properties *not* to be indexed. + + :rtype: bool or :class:`gcloud.datastore.datastore_v1_pb2.Key` + :returns: True if the save succeeds, unless a new ID has been + automatically allocated. In the auto ID case, the newly + created key protobuf is returned. """ mutation = self.mutation() + key_pb = helpers._prepare_key_for_request(key_pb) # If the Key is complete, we should upsert # instead of using insert_auto_id. @@ -506,10 +510,7 @@ def delete_entities(self, dataset_id, key_pbs): :returns: True """ mutation = self.mutation() - - for key_pb in key_pbs: - delete = mutation.delete.add() - delete.CopyFrom(key_pb) + helpers._add_keys_to_request(mutation.delete, key_pbs) if not self.transaction(): self.commit(dataset_id, mutation) diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 7903e81290fe..10b703baf39e 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -136,7 +136,7 @@ def get_entity(self, key_or_path): if isinstance(key_or_path, Key): entities = self.get_entities([key_or_path]) else: - key = Key.from_path(*key_or_path) + key = Key(*key_or_path) entities = self.get_entities([key]) if entities: @@ -196,7 +196,7 @@ def allocate_ids(self, incomplete_key, num_ids): :return: The (complete) keys allocated with `incomplete_key` as root. :raises: `ValueError` if `incomplete_key` is not a partial key. 
""" - if not incomplete_key.is_partial(): + if not incomplete_key.is_partial: raise ValueError(('Key is not partial.', incomplete_key)) incomplete_key_pb = incomplete_key.to_protobuf() @@ -206,5 +206,5 @@ def allocate_ids(self, incomplete_key, num_ids): self.id(), incomplete_key_pbs) allocated_ids = [allocated_key_pb.path_element[-1].id for allocated_key_pb in allocated_key_pbs] - return [incomplete_key.id(allocated_id) + return [incomplete_key.complete_key(allocated_id) for allocated_id in allocated_ids] diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 6411d3ee64f6..ccc937619261 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -14,6 +14,7 @@ """Class for representing a single entity in the Cloud Datastore.""" +from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore.key import Key @@ -26,7 +27,7 @@ class NoDataset(RuntimeError): """Exception raised by Entity methods which require a dataset.""" -class Entity(dict): +class Entity(_implicit_environ._DatastoreBase): """Entities are akin to rows in a relational database An entity storing the actual instance of data. @@ -41,9 +42,9 @@ class Entity(dict): Entities in this API act like dictionaries with extras built in that allow you to delete or persist the data stored on the entity. - Entities are mutable and act like a subclass of a dictionary. - This means you could take an existing entity and change the key - to duplicate the object. + Entities are mutable and properties can be set, updated and deleted + like keys in a dictionary. This means you could take an existing entity + and change the key to duplicate the object. Use :func:`gcloud.datastore.dataset.Dataset.get_entity` to retrieve an existing entity. @@ -59,10 +60,9 @@ class Entity(dict): >>> entity - And you can convert an entity to a regular Python dictionary with the - `dict` builtin: + And you can convert an entity to a regular Python dictionary - >>> dict(entity) + >>> entity.to_dict() {'age': 20, 'name': 'JJ'} .. note:: @@ -94,14 +94,60 @@ class Entity(dict): """ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()): - super(Entity, self).__init__() - self._dataset = dataset + super(Entity, self).__init__(dataset=dataset) + self._data = {} if kind: - self._key = Key().kind(kind) + # This is temporary since the dataset will eventually be 100% + # removed from the Entity and the Dataset class may be + # destroyed. + self._key = Key(kind, dataset_id=self.dataset().id()) else: self._key = None self._exclude_from_indexes = set(exclude_from_indexes) + def __getitem__(self, item_name): + return self._data[item_name] + + def __setitem__(self, item_name, value): + self._data[item_name] = value + + def __delitem__(self, item_name): + del self._data[item_name] + + def clear_properties(self): + """Clear all properties from the Entity.""" + self._data.clear() + + def update_properties(self, *args, **kwargs): + """Allows entity properties to be updated in bulk. + + Either takes a single dictionary or uses the keywords passed in. + + >>> entity + + >>> entity.update_properties(prop1=u'bar', prop2=u'baz') + >>> entity + + >>> entity.update_properties({'prop1': 0, 'prop2': 1}) + >>> entity + + + :raises: `TypeError` a mix of positional and keyword arguments are + used or if more than one positional argument is used. 
+ """ + if args and kwargs or len(args) > 1: + raise TypeError('Only a single dictionary or keyword arguments ' + 'may be used') + if args: + dict_arg, = args + self._data.update(dict_arg) + else: + self._data.update(kwargs) + + def to_dict(self): + """Converts the stored properties to a dictionary.""" + return self._data.copy() + def dataset(self): """Get the :class:`.dataset.Dataset` in which this entity belongs. @@ -150,7 +196,7 @@ def kind(self): """ if self._key: - return self._key.kind() + return self._key.kind def exclude_from_indexes(self): """Names of fields which are *not* to be indexed for this entity. @@ -215,7 +261,7 @@ def reload(self): entity = dataset.get_entity(key.to_protobuf()) if entity: - self.update(entity) + self.update_properties(entity.to_dict()) return self def save(self): @@ -241,29 +287,18 @@ def save(self): key_pb = connection.save_entity( dataset_id=dataset.id(), key_pb=key.to_protobuf(), - properties=dict(self), + properties=self.to_dict(), exclude_from_indexes=self.exclude_from_indexes()) # If we are in a transaction and the current entity needs an # automatically assigned ID, tell the transaction where to put that. transaction = connection.transaction() - if transaction and key.is_partial(): + if transaction and key.is_partial: transaction.add_auto_id_entity(self) if isinstance(key_pb, datastore_pb.Key): - # Update the path (which may have been altered). - # NOTE: The underlying namespace can't have changed in a save(). - # The value of the dataset ID may have changed from implicit - # (i.e. None, with the ID implied from the dataset.Dataset - # object associated with the Entity/Key), but if it was - # implicit before the save() we leave it as implicit. - path = [] - for element in key_pb.path_element: - key_part = {} - for descriptor, value in element._fields.items(): - key_part[descriptor.name] = value - path.append(key_part) - self._key = key.path(path) + # Update the key (which may have been altered). + self._key = self.key().compare_to_proto(key_pb) return self @@ -284,7 +319,6 @@ def delete(self): def __repr__(self): if self._key: - return '' % (self._key.path(), - super(Entity, self).__repr__()) + return '' % (self._key.path, self._data) else: - return '' % (super(Entity, self).__repr__()) + return '' % (self._data,) diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 54c8ab8855f0..9d2880548410 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -25,6 +25,7 @@ import pytz import six +from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore.entity import Entity from gcloud.datastore.key import Key @@ -65,19 +66,15 @@ def key_from_protobuf(pb): :rtype: :class:`gcloud.datastore.key.Key` :returns: a new `Key` instance """ - path = [] + path_args = [] for element in pb.path_element: - element_dict = {'kind': element.kind} - + path_args.append(element.kind) if element.HasField('id'): - element_dict['id'] = element.id - + path_args.append(element.id) # This is safe: we expect proto objects returned will only have # one of `name` or `id` set. 
         if element.HasField('name'):
-            element_dict['name'] = element.name
-
-        path.append(element_dict)
+            path_args.append(element.name)
 
     dataset_id = None
     if pb.partition_id.HasField('dataset_id'):
@@ -86,7 +83,7 @@ def key_from_protobuf(pb):
     if pb.partition_id.HasField('namespace'):
         namespace = pb.partition_id.namespace
 
-    return Key(path, namespace, dataset_id)
+    return Key(*path_args, namespace=namespace, dataset_id=dataset_id)
 
 
 def _pb_attr_value(val):
@@ -252,7 +249,7 @@ def _set_protobuf_value(value_pb, val):
         key = val.key()
         if key is not None:
             e_pb.key.CopyFrom(key.to_protobuf())
-        for item_key, value in val.items():
+        for item_key, value in val.to_dict().items():
             p_pb = e_pb.property.add()
             p_pb.name = item_key
             _set_protobuf_value(p_pb.value, value)
@@ -263,3 +260,44 @@ def _set_protobuf_value(value_pb, val):
             _set_protobuf_value(i_pb, item)
     else:  # scalar, just assign
         setattr(value_pb, attr, val)
+
+
+def _prepare_key_for_request(key_pb):
+    """Strip the dataset ID from a key protobuf before using in a request.
+
+    :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pb: A key to be added to a request.
+
+    :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :returns: A key which will be added to a request. It will be the
+              original if nothing needs to be changed.
+    """
+    if key_pb.partition_id.HasField('dataset_id'):
+        # We remove the dataset_id from the protobuf. This is because
+        # the backend fails a request if the key contains un-prefixed
+        # dataset ID. The backend fails because requests to
+        #     /datastore/.../datasets/foo/...
+        # and
+        #     /datastore/.../datasets/s~foo/...
+        # both go to the datastore given by 's~foo'. So if the key
+        # protobuf in the request body has dataset_id='foo', the
+        # backend will reject since 'foo' != 's~foo'.
+        new_key_pb = datastore_pb.Key()
+        new_key_pb.CopyFrom(key_pb)
+        new_key_pb.partition_id.ClearField('dataset_id')
+        key_pb = new_key_pb
+    return key_pb
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+    """Add protobuf keys to a request object.
+
+    :type request_field_pb: `RepeatedCompositeFieldContainer`
+    :param request_field_pb: A repeated proto field that contains keys.
+
+    :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pbs: The keys to add to a request.
+    """
+    for key_pb in key_pbs:
+        key_pb = _prepare_key_for_request(key_pb)
+        request_field_pb.add().CopyFrom(key_pb)
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index f187f17aeaad..7c0997f85dbc 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -19,39 +19,108 @@
 
 import six
 
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
 
 class Key(object):
     """An immutable representation of a datastore Key.
 
+    To create a basic key:
+
+      >>> Key('EntityKind', 1234)
+      <Key[{'kind': 'EntityKind', 'id': 1234}]>
+      >>> Key('EntityKind', 'foo')
+      <Key[{'kind': 'EntityKind', 'name': 'foo'}]>
+
+    To create a key with a parent:
+
+      >>> Key('Parent', 'foo', 'Child', 1234)
+      <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child', 'id': 1234}]>
+
+    To create a partial key:
+
+      >>> Key('Parent', 'foo', 'Child')
+      <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child'}]>
+
     .. automethod:: __init__
     """
 
-    def __init__(self, path=None, namespace=None, dataset_id=None):
+    def __init__(self, *path_args, **kwargs):
         """Constructor / initializer for a key.
 
-        :type namespace: :class:`str`
-        :param namespace: A namespace identifier for the key.
+        :type path_args: tuple of strings and ints
+        :param path_args: May represent a partial (odd length) or full (even
+                          length) key path.
-        :type path: sequence of dicts
-        :param path: Each dict must have keys 'kind' (a string) and optionally
-                     'name' (a string) or 'id' (an integer).
+        :type namespace: :class:`str`
+        :param namespace: A namespace identifier for the key. Can only be
+                          passed as a keyword argument.
 
         :type dataset_id: string
-        :param dataset: The dataset ID assigned by back-end for the key.
+        :param dataset_id: The dataset ID associated with the key. This is
+                           required. Can only be passed as a keyword argument.
+        """
+        self._path = self._parse_path(path_args)
+        self._flat_path = path_args
+        self._parent = None  # DJH: Add parent to constructor.
+        self._namespace = kwargs.get('namespace')
+        self._dataset_id = kwargs.get('dataset_id')
+        self._validate_dataset_id()
+
+    def _validate_dataset_id(self):
+        """Ensures the dataset ID is set.
 
-        .. note::
-          The key's ``_dataset_id`` field must be None for keys created
-          by application code.  The
-          :func:`gcloud.datastore.helpers.key_from_protobuf` factory
-          will set the field to an appropriate value for keys
-          returned from the datastore backend.  The application
-          **must** treat any value set by the back-end as opaque.
+        If unset, attempts to imply the ID from the environment.
+
+        :raises: `ValueError` if there is no `dataset_id` and none
+                 can be implied.
+        """
+        if self._dataset_id is None:
+            if _implicit_environ.DATASET is not None:
+                # This assumes DATASET.id() is not None.
+                self._dataset_id = _implicit_environ.DATASET.id()
+            else:
+                raise ValueError('A Key must have a dataset ID set.')
+
+    @staticmethod
+    def _parse_path(path_args):
+        """Parses positional arguments into a key path, with kinds and IDs.
+
+        :rtype: list of dict
+        :returns: A list of key parts with kind and ID or name set.
+        :raises: `ValueError` if there are no `path_args`, if one of the
+                 kinds is not a string or if one of the IDs/names is not
+                 a string or an integer.
         """
-        self._path = path or [{'kind': ''}]
-        self._namespace = namespace
-        self._dataset_id = dataset_id
+        if len(path_args) == 0:
+            raise ValueError('Key path must not be empty.')
+
+        kind_list = path_args[::2]
+        id_or_name_list = path_args[1::2]
+        if len(path_args) % 2 == 1:
+            # Add dummy None to be ignored below.
+            id_or_name_list += (None,)
+
+        result = []
+        for kind, id_or_name in izip(kind_list, id_or_name_list):
+            curr_key_part = {}
+            if isinstance(kind, six.string_types):
+                curr_key_part['kind'] = kind
+            else:
+                raise ValueError(kind, 'Kind was not a string.')
+
+            if isinstance(id_or_name, six.string_types):
+                curr_key_part['name'] = id_or_name
+            elif isinstance(id_or_name, six.integer_types):
+                curr_key_part['id'] = id_or_name
+            elif id_or_name is not None:
+                raise ValueError(id_or_name,
+                                 'ID/name was not a string or integer.')
+
+            result.append(curr_key_part)
+
+        return result
 
     def _clone(self):
         """Duplicates the Key.
@@ -61,10 +130,118 @@ def _clone(self):
         which we don't want to lose.
 
         :rtype: :class:`gcloud.datastore.key.Key`
-        :returns: a new `Key` instance
+        :returns: A new `Key` instance with the same data as the current one.
         """
         return copy.deepcopy(self)
 
+    def complete_key(self, id_or_name):
+        """Creates new key from existing partial key by adding final ID/name.
+
+        :rtype: :class:`gcloud.datastore.key.Key`
+        :returns: A new `Key` instance with the same data as the current one
+                  and an extra ID or name added.
+        :raises: `ValueError` if the current key is not partial or if
+                 `id_or_name` is not a string or integer.
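+
+        A short sketch of typical use (the dataset ID is hypothetical):
+
+        >>> key = Key('Parent', 'foo', 'Child', dataset_id='dataset')
+        >>> key.complete_key(1234).flat_path
+        ('Parent', 'foo', 'Child', 1234)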
+ """ + if not self.is_partial: + raise ValueError('Only a partial key can be completed.') + + id_or_name_key = None + if isinstance(id_or_name, six.string_types): + id_or_name_key = 'name' + elif isinstance(id_or_name, six.integer_types): + id_or_name_key = 'id' + else: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + new_key = self._clone() + new_key._path[-1][id_or_name_key] = id_or_name + new_key._flat_path += (id_or_name,) + return new_key + + def _validate_protobuf_dataset_id(self, protobuf): + """Checks that dataset ID on protobuf matches current one. + + The value of the dataset ID may have changed from unprefixed + (e.g. 'foo') to prefixed (e.g. 's~foo' or 'e~foo'). + + :type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key` + :param protobuf: A protobuf representation of the key. Expected to be + returned after a datastore operation. + + :rtype: :class:`str` + """ + proto_dataset_id = protobuf.partition_id.dataset_id + if proto_dataset_id == self.dataset_id: + return + + # Since they don't match, we check to see if `proto_dataset_id` has a + # prefix. + unprefixed = None + prefix = proto_dataset_id[:2] + if prefix in ('s~', 'e~'): + unprefixed = proto_dataset_id[2:] + + if unprefixed != self.dataset_id: + raise ValueError('Dataset ID on protobuf does not match.', + proto_dataset_id, self.dataset_id) + + def compare_to_proto(self, protobuf): + """Checks current key against a protobuf; updates if partial. + + If the current key is partial, returns a new key that has been + completed otherwise returns the current key. + + :type protobuf: :class:`gcloud.datastore.datastore_v1_pb2.Key` + :param protobuf: A protobuf representation of the key. Expected to be + returned after a datastore operation. + + :rtype: :class:`gcloud.datastore.key.Key` + :returns: The current key if not partial. + :raises: `ValueError` if the namespace or dataset ID of `protobuf` + don't match the current values or if the path from `protobuf` + doesn't match. + """ + if self.namespace is None: + if protobuf.partition_id.HasField('namespace'): + raise ValueError('Namespace unset on key but set on protobuf.') + elif protobuf.partition_id.namespace != self.namespace: + raise ValueError('Namespace on protobuf does not match.', + protobuf.partition_id.namespace, self.namespace) + + # Check that dataset IDs match. + self._validate_protobuf_dataset_id(protobuf) + + path = [] + # DJH: This happens in helpers.py too, should be a method. + for element in protobuf.path_element: + key_part = {} + for descriptor, value in element._fields.items(): + key_part[descriptor.name] = value + path.append(key_part) + + if path == self.path: + return self + + if not self.is_partial: + raise ValueError('Proto path does not match completed key.', + path, self.path) + + last_part = path[-1] + id_or_name = None + if 'id' in last_part: + id_or_name = last_part.pop('id') + elif 'name' in last_part: + id_or_name = last_part.pop('name') + + # We have edited path by popping from the last part, so check again. + if path != self.path: + raise ValueError('Proto path does not match partial key.', + path, self.path) + + return self.complete_key(id_or_name) + def to_protobuf(self): """Return a protobuf corresponding to the key. @@ -72,14 +249,12 @@ def to_protobuf(self): :returns: The Protobuf representing the key. 
""" key = datastore_pb.Key() + key.partition_id.dataset_id = self.dataset_id - if self._dataset_id is not None: - key.partition_id.dataset_id = self._dataset_id - - if self._namespace: - key.partition_id.namespace = self._namespace + if self.namespace: + key.partition_id.namespace = self.namespace - for item in self.path(): + for item in self.path: element = key.path_element.add() if 'kind' in item: element.kind = item['kind'] @@ -90,155 +265,122 @@ def to_protobuf(self): return key - @classmethod - def from_path(cls, *args, **kwargs): - """Factory method for creating a key based on a path. - - :type args: :class:`tuple` - :param args: sequence of even length, where the first of each pair is a - string representing the 'kind' of the path element, and - the second of the pair is either a string (for the path - element's name) or an integer (for its id). - - :type kwargs: :class:`dict` - :param kwargs: Other named parameters which can be passed to - :func:`Key.__init__`. - - :rtype: :class:`gcloud.datastore.key.Key` - :returns: a new :class:`Key` instance - """ - if len(args) % 2: - raise ValueError('Must pass an even number of args.') - - path = [] - items = iter(args) - - for kind, id_or_name in izip(items, items): - entry = {'kind': kind} - if isinstance(id_or_name, six.string_types): - entry['name'] = id_or_name - else: - entry['id'] = id_or_name - path.append(entry) - - kwargs['path'] = path - return cls(**kwargs) - + @property def is_partial(self): - """Boolean test: is the key fully mapped onto a backend entity? + """Boolean indicating if the key has an ID (or name). :rtype: :class:`bool` :returns: True if the last element of the key's path does not have an 'id' or a 'name'. """ - return self.id_or_name() is None + return self.id_or_name is None - def namespace(self, namespace=None): - """Namespace setter / getter. - - :type namespace: :class:`str` - :param namespace: A namespace identifier for the key. + @property + def namespace(self): + """Namespace getter. - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given namespace - (setter); or self's namespace (getter). + :rtype: :class:`str` + :returns: The namespace of the current key. """ - if namespace: - clone = self._clone() - clone._namespace = namespace - return clone - else: - return self._namespace - - def path(self, path=None): - """Path setter / getter. + return self._namespace - :type path: sequence of dicts - :param path: Each dict must have keys 'kind' (a string) and optionally - 'name' (a string) or 'id' (an integer). + @property + def path(self): + """Path getter. - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given path (setter); - or self's path (getter). + :rtype: :class:`str` + :returns: The (key) path of the current key. """ - if path: - clone = self._clone() - clone._path = path - return clone - else: - return self._path + # DJH: Maybe this should be a copy. + return self._path - def kind(self, kind=None): - """Kind setter / getter. Based on the last element of path. + @property + def flat_path(self): + """Getter for the key path as a tuple. - :type kind: :class:`str` - :param kind: The new kind for the key. - - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given kind (setter); - or self's kind (getter). + :rtype: :class:`tuple` of string and int + :returns: The tuple of elements in the path. 
""" - if kind: - clone = self._clone() - clone._path[-1]['kind'] = kind - return clone - elif self.path(): - return self._path[-1]['kind'] - - def id(self, id_to_set=None): - """ID setter / getter. Based on the last element of path. + return self._flat_path - :type id_to_set: :class:`int` - :param id_to_set: The new ID for the key. + @property + def kind(self): + """Kind getter. Based on the last element of path. - :rtype: :class:`Key` (for setter); or :class:`int` (for getter) - :returns: a new key, cloned from self., with the given id (setter); - or self's id (getter). + :rtype: :class:`str` + :returns: The kind of the current key. """ - if id_to_set: - clone = self._clone() - clone._path[-1]['id'] = id_to_set - return clone - elif self.path(): - return self._path[-1].get('id') + return self.path[-1]['kind'] - def name(self, name=None): - """Name setter / getter. Based on the last element of path. + @property + def id(self): + """ID getter. Based on the last element of path. - :type kind: :class:`str` - :param kind: The new name for the key. + :rtype: :class:`int` + :returns: The (integer) ID of the key. + """ + return self.path[-1].get('id') + + @property + def name(self): + """Name getter. Based on the last element of path. - :rtype: :class:`Key` (for setter); or :class:`str` (for getter) - :returns: a new key, cloned from self., with the given name (setter); - or self's name (getter). + :rtype: :class:`str` + :returns: The (string) name of the key. """ - if name: - clone = self._clone() - clone._path[-1]['name'] = name - return clone - elif self.path(): - return self._path[-1].get('name') + return self.path[-1].get('name') + @property def id_or_name(self): - """Getter. Based on the last element of path. + """Getter. Based on the last element of path. - :rtype: :class:`int` (if 'id' is set); or :class:`str` (the 'name') - :returns: True if the last element of the key's path has either an 'id' + :rtype: :class:`int` (if 'id') or :class:`str` (if 'name') + :returns: The last element of the key's path if it is either an 'id' or a 'name'. """ - return self.id() or self.name() + return self.id or self.name + + @property + def dataset_id(self): + """Dataset ID getter. + + :rtype: :class:`str` + :returns: The key's dataset. + """ + return self._dataset_id + + def _make_parent(self): + """Creates a parent key for the current path. + + Extracts all but the last element in the key path and creates a new + key, while still matching the namespace and the dataset ID. + :rtype: :class:`gcloud.datastore.key.Key` or NoneType + :returns: a new `Key` instance, whose path consists of all but the last + element of self's path. If self has only one path element, + returns None. + """ + if self.is_partial: + parent_args = self.flat_path[:-1] + else: + parent_args = self.flat_path[:-2] + if parent_args: + return Key(*parent_args, dataset_id=self.dataset_id, + namespace=self.namespace) + + @property def parent(self): - """Getter: return a new key for the next highest element in path. + """The parent of the current key. - :rtype: :class:`gcloud.datastore.key.Key` + :rtype: :class:`gcloud.datastore.key.Key` or NoneType :returns: a new `Key` instance, whose path consists of all but the last element of self's path. If self has only one path element, - return None. + returns None. 
""" - if len(self._path) <= 1: - return None - return self.path(self.path()[:-1]) + if self._parent is None: + self._parent = self._make_parent() + + return self._parent def __repr__(self): - return '' % self.path() + return '' % (self.path, self.dataset_id) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 9ae6790daca8..53e4e29a3642 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -16,12 +16,13 @@ import base64 +from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers from gcloud.datastore.key import Key -class Query(object): +class Query(_implicit_environ._DatastoreBase): """A Query against the Cloud Datastore. This class serves as an abstraction for creating a query over data @@ -71,7 +72,7 @@ class Query(object): """Mapping of operator strings and their protobuf equivalents.""" def __init__(self, kind=None, dataset=None, namespace=None): - self._dataset = dataset + super(Query, self).__init__(dataset=dataset) self._namespace = namespace self._pb = datastore_pb.Query() self._offset = 0 @@ -162,7 +163,14 @@ def filter(self, property_name, operator, value): property_filter.operator = pb_op_enum # Set the value to filter on based on the type. - helpers._set_protobuf_value(property_filter.value, value) + if property_name == '__key__': + if not isinstance(value, Key): + raise TypeError('__key__ query requires a Key instance.') + key_pb = value.to_protobuf() + property_filter.value.key_value.CopyFrom( + helpers._prepare_key_for_request(key_pb)) + else: + helpers._set_protobuf_value(property_filter.value, value) return clone def ancestor(self, ancestor): @@ -171,7 +179,7 @@ def ancestor(self, ancestor): This will return a clone of the current :class:`Query` filtered by the ancestor provided. For example:: - >>> parent_key = Key.from_path('Person', '1') + >>> parent_key = Key('Person', '1') >>> query = dataset.query('Person') >>> filtered_query = query.ancestor(parent_key) @@ -212,7 +220,7 @@ def ancestor(self, ancestor): # If a list was provided, turn it into a Key. if isinstance(ancestor, list): - ancestor = Key.from_path(*ancestor) + ancestor = Key(*ancestor) # If we don't have a Key value by now, something is wrong. if not isinstance(ancestor, Key): @@ -226,7 +234,8 @@ def ancestor(self, ancestor): ancestor_filter = composite_filter.filter.add().property_filter ancestor_filter.property.name = '__key__' ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR - ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf()) + ancestor_pb = helpers._prepare_key_for_request(ancestor.to_protobuf()) + ancestor_filter.value.key_value.CopyFrom(ancestor_pb) return clone diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index e258cda05003..578b97a5376a 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -35,6 +35,47 @@ def test_it(self): self.assertTrue(client._get_app_default_called) +class Test__set_dataset_from_environ(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore import _set_dataset_from_environ + return _set_dataset_from_environ() + + def _test_with_environ(self, environ, expected_result): + import os + from gcloud._testing import _Monkey + from gcloud import datastore + from gcloud.datastore import _implicit_environ + + # Check the environment is unset. 
+        self.assertEqual(_implicit_environ.DATASET, None)
+
+        def custom_getenv(key):
+            return environ.get(key)
+
+        def custom_get_dataset(dataset_id):
+            return dataset_id
+
+        with _Monkey(os, getenv=custom_getenv):
+            with _Monkey(datastore, get_dataset=custom_get_dataset):
+                self._callFUT()
+
+        self.assertEqual(_implicit_environ.DATASET, expected_result)
+
+    def test_set_from_env_var(self):
+        from gcloud.datastore import _DATASET_ENV_VAR_NAME
+
+        # Environment values for the monkeypatched getenv.
+        DATASET = 'dataset'
+        VALUES = {
+            _DATASET_ENV_VAR_NAME: DATASET,
+        }
+        self._test_with_environ(VALUES, DATASET)
+
+    def test_no_env_var_set(self):
+        self._test_with_environ({}, None)
+
+
 class Test_get_dataset(unittest2.TestCase):
 
     def _callFUT(self, dataset_id):
@@ -56,3 +97,104 @@ def test_it(self):
         self.assertTrue(isinstance(found.connection(), Connection))
         self.assertEqual(found.id(), DATASET_ID)
         self.assertTrue(client._get_app_default_called)
+
+
+class Test_implicit_behavior(unittest2.TestCase):
+
+    def test__require_dataset(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        original_dataset = _implicit_environ.DATASET
+
+        try:
+            _implicit_environ.DATASET = None
+            self.assertRaises(EnvironmentError,
+                              gcloud.datastore._require_dataset)
+            NEW_DATASET = object()
+            _implicit_environ.DATASET = NEW_DATASET
+            self.assertEqual(gcloud.datastore._require_dataset(), NEW_DATASET)
+        finally:
+            _implicit_environ.DATASET = original_dataset
+
+    def test_get_entity(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        DUMMY_KEY = object()
+        DUMMY_VAL = object()
+        CUSTOM_DATASET[DUMMY_KEY] = DUMMY_VAL
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            result = gcloud.datastore.get_entity(DUMMY_KEY)
+        self.assertTrue(result is DUMMY_VAL)
+
+    def test_get_entities(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        DUMMY_KEYS = [object(), object()]
+        DUMMY_VALS = [object(), object()]
+        for key, val in zip(DUMMY_KEYS, DUMMY_VALS):
+            CUSTOM_DATASET[key] = val
+
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            result = gcloud.datastore.get_entities(DUMMY_KEYS)
+        self.assertTrue(result == DUMMY_VALS)
+
+    def test_allocate_ids(self):
+        import gcloud.datastore
+        from gcloud.datastore import _implicit_environ
+        from gcloud.datastore.key import Key
+        from gcloud.datastore.test_entity import _Dataset
+        from gcloud._testing import _Monkey
+
+        CUSTOM_DATASET = _Dataset()
+        NUM_IDS = 2
+        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET):
+            INCOMPLETE_KEY = Key('KIND')
+            result = gcloud.datastore.allocate_ids(INCOMPLETE_KEY, NUM_IDS)
+
+        # Check the IDs returned.
+        self.assertEqual([key.id for key in result], range(1, NUM_IDS + 1))
+
+    def test_set_DATASET(self):
+        import os
+        from gcloud._testing import _Monkey
+        from gcloud.test_credentials import _Client
+        from gcloud import credentials
+        from gcloud.datastore import _implicit_environ
+
+        # Make custom client for doing auth. Have to fake auth since we
+        # can't monkey patch `datastore.get_dataset` while reloading the
+        # `datastore.__init__` module.
+        client = _Client()
+
+        # Fake auth variables.
+        DATASET = 'dataset'
+
+        # Make a custom getenv function to Monkey.
+        VALUES = {
+            'GCLOUD_DATASET_ID': DATASET,
+        }
+
+        def custom_getenv(key):
+            return VALUES.get(key)
+
+        # Perform the import again with our test patches.
+        with _Monkey(credentials, client=client):
+            with _Monkey(os, getenv=custom_getenv):
+                import gcloud.datastore
+                reload(gcloud.datastore)
+
+        # Check that the DATASET was correctly implied from the environ.
+        implicit_dataset = _implicit_environ.DATASET
+        self.assertEqual(implicit_dataset.id(), DATASET)
+        # Check that the credentials on the implicit DATASET were set on the
+        # fake client.
+        cnxn_credentials = implicit_dataset.connection().credentials
+        self.assertTrue(cnxn_credentials is client._signed)
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 5a5e8e085bbc..81a098a2efc9 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -22,6 +22,13 @@
         return Connection
 
+    def _make_key_pb(self, dataset_id, id=1234):
+        from gcloud.datastore.key import Key
+        path_args = ('Kind',)
+        if id is not None:
+            path_args += (id,)
+        return Key(*path_args, dataset_id=dataset_id).to_protobuf()
+
     def _makeOne(self, *args, **kw):
         return self._getTargetClass()(*args, **kw)
 
@@ -206,10 +213,9 @@ def test_dataset(self):
 
     def test_lookup_single_key_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -229,14 +235,13 @@ def test_lookup_single_key_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_single_key_empty_response_w_eventual(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -256,17 +261,15 @@ def test_lookup_single_key_empty_response_w_eventual(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.read_consistency,
                          datastore_pb.ReadOptions.EVENTUAL)
         self.assertEqual(request.read_options.transaction, '')
 
     def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
-        from gcloud.datastore.key import Key
-
         DATASET_ID = 'DATASET'
         TRANSACTION = 'TRANSACTION'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         conn = self._makeOne()
         conn.transaction(Transaction(TRANSACTION))
         self.assertRaises(
@@ -274,11 +277,10 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
 
     def test_lookup_single_key_empty_response_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
         TRANSACTION = 'TRANSACTION'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         conn.transaction(Transaction(TRANSACTION))
@@ -299,15 +301,14 @@ def test_lookup_single_key_empty_response_w_transaction(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.transaction, TRANSACTION)
 
     def test_lookup_single_key_nonempty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.LookupResponse()
         entity = datastore_pb.Entity()
         entity.key.CopyFrom(key_pb)
@@ -332,15 +333,14 @@ def test_lookup_single_key_nonempty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_multiple_keys_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         rsp_pb = datastore_pb.LookupResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -360,16 +360,15 @@ def test_lookup_multiple_keys_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         rsp_pb = datastore_pb.LookupResponse()
         er_1 = rsp_pb.missing.add()
         er_1.entity.key.CopyFrom(key_pb1)
@@ -397,14 +396,13 @@ def test_lookup_multiple_keys_w_missing(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing_non_empty(self):
-        from gcloud.datastore.key import Key
-
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         conn = self._makeOne()
         missing = ['this', 'list', 'is', 'not', 'empty']
         self.assertRaises(
@@ -413,11 +411,10 @@ def test_lookup_multiple_keys_w_missing_non_empty(self):
 
     def test_lookup_multiple_keys_w_deferred(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         rsp_pb = datastore_pb.LookupResponse()
         rsp_pb.deferred.add().CopyFrom(key_pb1)
         rsp_pb.deferred.add().CopyFrom(key_pb2)
@@ -447,14 +444,13 @@ def test_lookup_multiple_keys_w_deferred(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_deferred_non_empty(self):
-        from gcloud.datastore.key import Key
-
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         conn = self._makeOne()
         deferred = ['this', 'list', 'is', 'not', 'empty']
         self.assertRaises(
@@ -463,11 +459,10 @@ def test_lookup_multiple_keys_w_deferred_non_empty(self):
 
     def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
-        key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf()
+        key_pb1 = self._make_key_pb(DATASET_ID)
+        key_pb2 = self._make_key_pb(DATASET_ID, id=2345)
         rsp_pb1 = datastore_pb.LookupResponse()
         entity1 = datastore_pb.Entity()
         entity1.key.CopyFrom(key_pb1)
@@ -505,8 +500,8 @@ def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self):
         request.ParseFromString(cw[0]['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
         self._verifyProtobufCall(cw[1], URI, conn)
         request.ParseFromString(cw[1]['body'])
@@ -740,10 +735,9 @@ def test_begin_transaction_explicit_serialize(self):
 
     def test_commit_wo_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         mutation = datastore_pb.Mutation()
         insert = mutation.upsert.add()
@@ -775,13 +769,12 @@ def test_commit_wo_transaction(self):
 
     def test_commit_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         class Xact(object):
             def id(self):
                 return 'xact'
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         mutation = datastore_pb.Mutation()
         insert = mutation.upsert.add()
@@ -884,16 +877,15 @@ def test_allocate_ids_empty(self):
 
     def test_allocate_ids_non_empty(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
         before_key_pbs = [
-            Key(path=[{'kind': 'Kind'}]).to_protobuf(),
-            Key(path=[{'kind': 'Kind'}]).to_protobuf(),
+            self._make_key_pb(DATASET_ID, id=None),
+            self._make_key_pb(DATASET_ID, id=None),
         ]
         after_key_pbs = [
-            Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf(),
-            Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf(),
+            self._make_key_pb(DATASET_ID),
+            self._make_key_pb(DATASET_ID, id=2345),
         ]
         rsp_pb = datastore_pb.AllocateIdsResponse()
         rsp_pb.key.add().CopyFrom(after_key_pbs[0])
@@ -915,14 +907,15 @@ def test_allocate_ids_non_empty(self):
         rq_class = datastore_pb.AllocateIdsRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
-        self.assertEqual(list(request.key), before_key_pbs)
+        self.assertEqual(len(request.key), len(before_key_pbs))
+        for key_before, key_after in zip(before_key_pbs, request.key):
+            _compare_key_pb_after_request(self, key_before, key_after)
 
     def test_save_entity_wo_transaction_w_upsert(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -947,7 +940,7 @@ def test_save_entity_wo_transaction_w_upsert(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = list(upsert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -958,11 +951,10 @@ def test_save_entity_wo_transaction_w_upsert(self):
 
     def test_save_entity_w_exclude_from_indexes(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
         import operator
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -989,7 +981,7 @@ def test_save_entity_w_exclude_from_indexes(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = sorted(upsert.property,
                        key=operator.attrgetter('name'),
                        reverse=True)
@@ -1008,11 +1000,10 @@ def test_save_entity_w_exclude_from_indexes(self):
 
     def test_save_entity_wo_transaction_w_auto_id(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind'}]).to_protobuf()
-        updated_key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID, id=None)
+        updated_key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         mr_pb = rsp_pb.mutation_result
         mr_pb.index_updates = 0
@@ -1039,7 +1030,7 @@ def test_save_entity_wo_transaction_w_auto_id(self):
         mutation = request.mutation
         inserts = list(mutation.insert_auto_id)
         insert = inserts[0]
-        self.assertEqual(insert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, insert.key)
         props = list(insert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -1052,7 +1043,6 @@ def test_save_entity_wo_transaction_w_auto_id(self):
 
     def test_save_entity_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         mutation = datastore_pb.Mutation()
 
@@ -1060,7 +1050,7 @@ class Xact(object):
             def mutation(self):
                 return mutation
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
@@ -1074,7 +1064,6 @@ def mutation(self):
     def test_save_entity_w_transaction_nested_entity(self):
         from gcloud.datastore.connection import datastore_pb
         from gcloud.datastore.entity import Entity
-        from gcloud.datastore.key import Key
 
         mutation = datastore_pb.Mutation()
 
@@ -1084,7 +1073,7 @@ def mutation(self):
         DATASET_ID = 'DATASET'
         nested = Entity()
         nested['bar'] = u'Bar'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
@@ -1097,10 +1086,9 @@ def mutation(self):
 
     def test_delete_entities_wo_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         URI = '/'.join([
@@ -1126,12 +1114,11 @@ def test_delete_entities_wo_transaction(self):
         deletes = list(mutation.delete)
         self.assertEqual(len(deletes), 1)
         delete = deletes[0]
-        self.assertEqual(delete, key_pb)
+        _compare_key_pb_after_request(self, key_pb, delete)
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
 
     def test_delete_entities_w_transaction(self):
         from gcloud.datastore.connection import datastore_pb
-        from gcloud.datastore.key import Key
 
         mutation = datastore_pb.Mutation()
 
@@ -1139,7 +1126,7 @@ class Xact(object):
             def mutation(self):
                 return mutation
 
         DATASET_ID = 'DATASET'
-        key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
+        key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.CommitResponse()
         conn = self._makeOne()
         conn.transaction(Xact())
@@ -1183,3 +1170,13 @@ def __init__(self, id):
 
     def id(self):
         return self._id
+
+
+def _compare_key_pb_after_request(test, key_before, key_after):
+    test.assertFalse(key_after.partition_id.HasField('dataset_id'))
+    test.assertEqual(key_before.partition_id.namespace,
+                     key_after.partition_id.namespace)
+    test.assertEqual(len(key_before.path_element),
+                     len(key_after.path_element))
+    for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
+        test.assertEqual(elt1, elt2)
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
index 955eebc8eeca..5ea02bc8fc19 100644
--- a/gcloud/datastore/test_dataset.py
+++ b/gcloud/datastore/test_dataset.py
@@ -81,7 +81,7 @@ def test_get_entity_miss(self):
         DATASET_ID = 'DATASET'
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=[{'kind': 'Kind', 'id': 1234}])
+        key = Key('Kind', 1234, dataset_id=DATASET_ID)
         self.assertEqual(dataset.get_entity(key), None)
 
     def test_get_entity_hit(self):
@@ -101,12 +101,12 @@ def test_get_entity_hit(self):
         prop.value.string_value = 'Foo'
         connection = _Connection(entity_pb)
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=PATH)
+        key = Key(KIND, ID, dataset_id=DATASET_ID)
         result = dataset.get_entity(key)
         key = result.key()
-        self.assertEqual(key._dataset_id, DATASET_ID)
-        self.assertEqual(key.path(), PATH)
-        self.assertEqual(list(result), ['foo'])
+        self.assertEqual(key.dataset_id, DATASET_ID)
+        self.assertEqual(key.path, PATH)
+        self.assertEqual(result.to_dict().keys(), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
     def test_get_entity_path(self):
@@ -127,18 +127,15 @@ def test_get_entity_path(self):
         dataset = self._makeOne(DATASET_ID, connection)
         result = dataset.get_entity([KIND, ID])
         key = result.key()
-        self.assertEqual(key._dataset_id, DATASET_ID)
-        self.assertEqual(key.path(), PATH)
-        self.assertEqual(list(result), ['foo'])
+        self.assertEqual(key.dataset_id, DATASET_ID)
+        self.assertEqual(key.path, PATH)
+        self.assertEqual(result.to_dict().keys(), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
-    def test_get_entity_odd_nonetype(self):
+    def test_get_entity_nonetype(self):
         DATASET_ID = 'DATASET'
-        KIND = 'Kind'
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        with self.assertRaises(ValueError):
-            dataset.get_entity([KIND])
         with self.assertRaises(TypeError):
             dataset.get_entity(None)
 
@@ -147,7 +144,7 @@ def test_get_entities_miss(self):
         DATASET_ID = 'DATASET'
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=[{'kind': 'Kind', 'id': 1234}])
+        key = Key('Kind', 1234, dataset_id=DATASET_ID)
         self.assertEqual(dataset.get_entities([key]), [])
 
     def test_get_entities_miss_w_missing(self):
@@ -156,7 +153,6 @@ def test_get_entities_miss_w_missing(self):
         DATASET_ID = 'DATASET'
         KIND = 'Kind'
         ID = 1234
-        PATH = [{'kind': KIND, 'id': ID}]
         missed = datastore_pb.Entity()
         missed.key.partition_id.dataset_id = DATASET_ID
         path_element = missed.key.path_element.add()
@@ -165,7 +161,7 @@ def test_get_entities_miss_w_missing(self):
         connection = _Connection()
         connection._missing = [missed]
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=PATH, dataset_id=DATASET_ID)
+        key = Key(KIND, ID, dataset_id=DATASET_ID)
         missing = []
         entities = dataset.get_entities([key], missing=missing)
         self.assertEqual(entities, [])
@@ -175,12 +171,9 @@ def test_get_entities_miss_w_missing(self):
     def test_get_entities_miss_w_deferred(self):
         from gcloud.datastore.key import Key
         DATASET_ID = 'DATASET'
-        KIND = 'Kind'
-        ID = 1234
-        PATH = [{'kind': KIND, 'id': ID}]
         connection = _Connection()
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=PATH, dataset_id=DATASET_ID)
+        key = Key('Kind', 1234, dataset_id=DATASET_ID)
         connection._deferred = [key.to_protobuf()]
         deferred = []
         entities = dataset.get_entities([key], deferred=deferred)
@@ -205,12 +198,13 @@ def test_get_entities_hit(self):
         prop.value.string_value = 'Foo'
         connection = _Connection(entity_pb)
         dataset = self._makeOne(DATASET_ID, connection)
-        key = Key(path=PATH)
+        key = Key(KIND, ID, dataset_id=DATASET_ID)
         result, = dataset.get_entities([key])
-        key = result.key()
-        self.assertEqual(key._dataset_id, DATASET_ID)
-        self.assertEqual(key.path(), PATH)
-        self.assertEqual(list(result), ['foo'])
+        new_key = result.key()
+        self.assertFalse(new_key is key)
+        self.assertEqual(new_key.dataset_id, DATASET_ID)
+        self.assertEqual(new_key.path, PATH)
+        self.assertEqual(result.to_dict().keys(), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
     def test_allocate_ids(self):
@@ -227,7 +221,8 @@ def test_allocate_ids(self):
         DATASET = self._makeOne(DATASET_ID, connection=CONNECTION)
         result = DATASET.allocate_ids(INCOMPLETE_KEY, NUM_IDS)
 
-        # Check the IDs returned match _PathElementProto.
+        # Check the IDs returned match.
+        self.assertEqual(INCOMPLETE_KEY._called_complete_key, range(NUM_IDS))
         self.assertEqual([key._id for key in result], range(NUM_IDS))
 
         # Check connection is called correctly.
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index c036720d4241..721d95d5f884 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -23,8 +23,10 @@ class TestEntity(unittest2.TestCase): def _getTargetClass(self): + from gcloud.datastore import _implicit_environ from gcloud.datastore.entity import Entity + _implicit_environ.DATASET = None return Entity def _makeOne(self, dataset=_MARKER, kind=_KIND, exclude_from_indexes=()): @@ -59,8 +61,8 @@ def test_key_getter(self): entity = self._makeOne() key = entity.key() self.assertIsInstance(key, Key) - self.assertEqual(key._dataset_id, None) - self.assertEqual(key.kind(), _KIND) + self.assertEqual(key.dataset_id, entity.dataset().id()) + self.assertEqual(key.kind, _KIND) def test_key_setter(self): entity = self._makeOne() @@ -68,17 +70,70 @@ def test_key_setter(self): entity.key(key) self.assertTrue(entity.key() is key) + def test___delitem__exists(self): + entity = self._makeOne() + entity['foo'] = 'bar' + # This will cause an error (not a failure) if it doesn't work. + # Can't use a try-except because coverage.py doesn't like a branch + # which never occurs. + del entity['foo'] + + def test___delitem__not_exist(self): + entity = self._makeOne() + fail_occurred = False + try: + del entity['foo'] + except KeyError: + fail_occurred = True + self.assertTrue(fail_occurred) + + def test_clear_properties(self): + entity = self._makeOne() + entity['foo'] = 0 + entity['bar'] = 1 + self.assertEqual(entity.to_dict(), {'foo': 0, 'bar': 1}) + + entity.clear_properties() + self.assertEqual(entity.to_dict(), {}) + + def test_update_properties_dict(self): + entity = self._makeOne() + self.assertEqual(entity.to_dict(), {}) + + NEW_VALUES = {'prop1': 0, 'prop2': 1} + entity.update_properties(NEW_VALUES) + self.assertEqual(entity.to_dict(), NEW_VALUES) + + def test_update_properties_keywords(self): + entity = self._makeOne() + self.assertEqual(entity.to_dict(), {}) + + NEW_VALUES = {'prop1': 0, 'prop2': 1} + entity.update_properties(**NEW_VALUES) + self.assertEqual(entity.to_dict(), NEW_VALUES) + + entity.update_properties(prop1=10, prop2=11) + NEW_VALUES_AGAIN = {'prop1': 10, 'prop2': 11} + self.assertEqual(entity.to_dict(), NEW_VALUES_AGAIN) + + def test_update_properties_invalid(self): + entity = self._makeOne() + + dict1 = {'foo': 'bar'} + dict2 = {'baz': 'zip'} + self.assertRaises(TypeError, entity.update_properties, dict1, dict2) + def test_from_key_wo_dataset(self): from gcloud.datastore.key import Key klass = self._getTargetClass() - key = Key().kind(_KIND).id(_ID) + key = Key(_KIND, _ID, dataset_id='DATASET') entity = klass.from_key(key) self.assertTrue(entity.dataset() is None) self.assertEqual(entity.kind(), _KIND) key = entity.key() - self.assertEqual(key.kind(), _KIND) - self.assertEqual(key.id(), _ID) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) def test_from_key_w_dataset(self): from gcloud.datastore.dataset import Dataset @@ -86,13 +141,13 @@ def test_from_key_w_dataset(self): klass = self._getTargetClass() dataset = Dataset(_DATASET_ID) - key = Key().kind(_KIND).id(_ID) + key = Key(_KIND, _ID, dataset_id=_DATASET_ID) entity = klass.from_key(key, dataset) self.assertTrue(entity.dataset() is dataset) self.assertEqual(entity.kind(), _KIND) key = entity.key() - self.assertEqual(key.kind(), _KIND) - self.assertEqual(key.id(), _ID) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) def test__must_key_no_key(self): from gcloud.datastore.entity import 
NoKey @@ -125,8 +180,13 @@ def test_reload_miss(self): def test_reload_hit(self): dataset = _Dataset() - dataset['KEY'] = {'foo': 'Bar'} + + fake_entity = self._makeOne(dataset=dataset) + fake_entity['foo'] = 'Bar' + key = _Key() + dataset[key._key] = fake_entity + entity = self._makeOne(dataset) entity.key(key) entity['foo'] = 'Foo' @@ -227,7 +287,7 @@ def test___repr___w_key_non_empty(self): connection = _Connection() dataset = _Dataset(connection) key = _Key() - key.path('/bar/baz') + key._path = '/bar/baz' entity = self._makeOne(dataset) entity.key(key) entity['foo'] = 'Foo' @@ -241,22 +301,35 @@ class _Key(object): _path = None _id = None - def id(self, id_to_set): - self._called_id = id_to_set - clone = _Key() - clone._id = id_to_set - return clone + def __init__(self): + self._called_complete_key = [] def to_protobuf(self): return self._key + def complete_key(self, id_or_name): + self._called_complete_key.append(id_or_name) + clone = _Key() + clone._id = id_or_name + return clone + + def compare_to_proto(self, key_pb): + # DJH: This is duplicated from Key.compare_to_proto. + self._path = [] + for element in key_pb.path_element: + key_part = {} + for descriptor, value in element._fields.items(): + key_part[descriptor.name] = value + self._path.append(key_part) + return self + + @property def is_partial(self): return self._partial - def path(self, path=_MARKER): - if path is self._MARKER: - return self._path - self._path = path + @property + def path(self): + return self._path class _Dataset(dict): @@ -265,6 +338,13 @@ def __init__(self, connection=None): super(_Dataset, self).__init__() self._connection = connection + def __bool__(self): + # Make sure the objects are Truth-y since an empty + # dict with _connection set will still be False-y. + return True + + __nonzero__ = __bool__ + def id(self): return _DATASET_ID @@ -274,6 +354,12 @@ def connection(self): def get_entity(self, key): return self.get(key) + def get_entities(self, keys): + return [self.get(key) for key in keys] + + def allocate_ids(self, incomplete_key, num_ids): + return [incomplete_key.complete_key(i + 1) for i in range(num_ids)] + class _Connection(object): _transaction = _saved = _deleted = None diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py index 70db552aaa7a..cbf3d1c31f46 100644 --- a/gcloud/datastore/test_helpers.py +++ b/gcloud/datastore/test_helpers.py @@ -20,8 +20,11 @@ class Test_entity_from_protobuf(unittest2.TestCase): _MARKER = object() def _callFUT(self, val, dataset=_MARKER): + from gcloud.datastore import _implicit_environ from gcloud.datastore.helpers import entity_from_protobuf + _implicit_environ.DATASET = None + if dataset is self._MARKER: return entity_from_protobuf(val) @@ -44,10 +47,10 @@ def test_wo_dataset(self): self.assertEqual(entity.kind(), _KIND) self.assertEqual(entity['foo'], 'Foo') key = entity.key() - self.assertEqual(key._dataset_id, _DATASET_ID) - self.assertEqual(key.namespace(), None) - self.assertEqual(key.kind(), _KIND) - self.assertEqual(key.id(), _ID) + self.assertEqual(key.dataset_id, _DATASET_ID) + self.assertEqual(key.namespace, None) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) def test_w_dataset(self): from gcloud.datastore import datastore_v1_pb2 as datastore_pb @@ -68,10 +71,10 @@ def test_w_dataset(self): self.assertEqual(entity.kind(), _KIND) self.assertEqual(entity['foo'], 'Foo') key = entity.key() - self.assertEqual(key._dataset_id, _DATASET_ID) - self.assertEqual(key.namespace(), None) - 
self.assertEqual(key.kind(), _KIND) - self.assertEqual(key.id(), _ID) + self.assertEqual(key.dataset_id, _DATASET_ID) + self.assertEqual(key.namespace, None) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.id, _ID) class Test_key_from_protobuf(unittest2.TestCase): @@ -99,37 +102,33 @@ def _makePB(self, dataset_id=None, namespace=None, path=()): def test_w_dataset_id_in_pb(self): _DATASET = 'DATASET' - pb = self._makePB(_DATASET) + pb = self._makePB(path=[{'kind': 'KIND'}], dataset_id=_DATASET) key = self._callFUT(pb) - self.assertEqual(key._dataset_id, _DATASET) - self.assertEqual(key.namespace(), None) + self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, None) def test_w_namespace_in_pb(self): + _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' - pb = self._makePB(namespace=_NAMESPACE) + pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE, + dataset_id=_DATASET) key = self._callFUT(pb) - self.assertEqual(key._dataset_id, None) - self.assertEqual(key.namespace(), _NAMESPACE) + self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, _NAMESPACE) def test_w_path_in_pb(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - pb = self._makePB(_DATASET, _NAMESPACE) - _PARENT = 'PARENT' - _CHILD = 'CHILD' - _GRANDCHILD = 'GRANDCHILD' - _ID = 1234 - _ID2 = 5678 - _NAME = 'NAME' - _NAME2 = 'NAME2' _PATH = [ - {'kind': _PARENT, 'name': _NAME}, - {'kind': _CHILD, 'id': _ID}, - {'kind': _GRANDCHILD, 'id': _ID2, 'name': _NAME2}, + {'kind': 'PARENT', 'name': 'NAME'}, + {'kind': 'CHILD', 'id': 1234}, + {'kind': 'GRANDCHILD', 'id': 5678}, ] - pb = self._makePB(path=_PATH) + pb = self._makePB(path=_PATH, dataset_id='DATASET') key = self._callFUT(pb) - self.assertEqual(key.path(), _PATH) + self.assertEqual(key.path, _PATH) + + def test_w_nothing_in_pb(self): + pb = self._makePB() + self.assertRaises(ValueError, self._callFUT, pb) class Test__pb_attr_value(unittest2.TestCase): @@ -165,11 +164,7 @@ def test_datetime_w_zone(self): def test_key(self): from gcloud.datastore.key import Key - _DATASET = 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = Key(dataset_id=_DATASET, path=_PATH) + key = Key('PATH', 1234, dataset_id='DATASET') name, value = self._callFUT(key) self.assertEqual(name, 'key_value') self.assertEqual(value, key.to_protobuf()) @@ -263,12 +258,8 @@ def test_key(self): from gcloud.datastore.datastore_v1_pb2 import Value from gcloud.datastore.key import Key - _DATASET = 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] pb = Value() - expected = Key(dataset_id=_DATASET, path=_PATH).to_protobuf() + expected = Key('KIND', 1234, dataset_id='DATASET').to_protobuf() pb.key_value.CopyFrom(expected) found = self._callFUT(pb) self.assertEqual(found.to_protobuf(), expected) @@ -299,6 +290,8 @@ def test_entity(self): pb = Value() entity_pb = pb.entity_value + entity_pb.key.path_element.add(kind='KIND') + entity_pb.key.partition_id.dataset_id = 'DATASET' prop_pb = entity_pb.property.add() prop_pb.name = 'foo' prop_pb.value.string_value = 'Foo' @@ -367,12 +360,8 @@ def test_datetime(self): def test_key(self): from gcloud.datastore.key import Key - _DATASET = 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] pb = self._makePB() - key = Key(dataset_id=_DATASET, path=_PATH) + key = Key('KIND', 1234, dataset_id='DATASET') self._callFUT(pb, key) value = pb.key_value self.assertEqual(value, key.to_protobuf()) @@ -455,7 +444,7 @@ def 
test_entity_w_key(self): from gcloud.datastore.key import Key pb = self._makePB() - key = Key(path=[{'kind': 'KIND', 'id': 123}]) + key = Key('KIND', 123, dataset_id='DATASET') entity = Entity().key(key) entity['foo'] = u'Foo' self._callFUT(pb, entity) @@ -475,3 +464,28 @@ def test_list(self): self.assertEqual(marshalled[0].string_value, values[0]) self.assertEqual(marshalled[1].integer_value, values[1]) self.assertEqual(marshalled[2].double_value, values[2]) + + +class Test__prepare_key_for_request(unittest2.TestCase): + + def _callFUT(self, key_pb): + from gcloud.datastore.helpers import _prepare_key_for_request + + return _prepare_key_for_request(key_pb) + + def test_prepare_dataset_valid(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + key = datastore_pb.Key() + key.partition_id.dataset_id = 'foo' + new_key = self._callFUT(key) + self.assertFalse(new_key is key) + + key_without = datastore_pb.Key() + new_key.ClearField('partition_id') + self.assertEqual(new_key, key_without) + + def test_prepare_dataset_unset(self): + from gcloud.datastore import datastore_v1_pb2 as datastore_pb + key = datastore_pb.Key() + new_key = self._callFUT(key) + self.assertTrue(new_key is key) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index c90e301d5eb6..b641ce225532 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -17,66 +17,197 @@ class TestKey(unittest2.TestCase): + def setUp(self): + self._DEFAULT_DATASET = 'DATASET' + def _getTargetClass(self): + from gcloud.datastore import _implicit_environ + from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + + _implicit_environ.DATASET = Dataset(self._DEFAULT_DATASET) return Key - def _makeOne(self, path=None, namespace=None, dataset_id=None): - return self._getTargetClass()(path, namespace, dataset_id) + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_empty(self): + self.assertRaises(ValueError, self._makeOne) - def test_ctor_defaults(self): - key = self._makeOne() - self.assertEqual(key._dataset_id, None) - self.assertEqual(key.namespace(), None) - self.assertEqual(key.kind(), '') - self.assertEqual(key.path(), [{'kind': ''}]) + def test_ctor_no_dataset(self): + from gcloud._testing import _Monkey + from gcloud.datastore import _implicit_environ + klass = self._getTargetClass() + with _Monkey(_implicit_environ, DATASET=None): + self.assertRaises(ValueError, klass, 'KIND') def test_ctor_explicit(self): - _DATASET = 'DATASET' + _DATASET = 'DATASET-ALT' _NAMESPACE = 'NAMESPACE' _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) - self.assertEqual(key._dataset_id, _DATASET) - self.assertEqual(key.namespace(), _NAMESPACE) - self.assertEqual(key.kind(), _KIND) - self.assertEqual(key.path(), _PATH) + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + dataset_id=_DATASET) + self.assertNotEqual(_DATASET, self._DEFAULT_DATASET) + self.assertEqual(key.dataset_id, _DATASET) + self.assertEqual(key.namespace, _NAMESPACE) + self.assertEqual(key.kind, _KIND) + self.assertEqual(key.path, _PATH) + + def test_ctor_bad_kind(self): + self.assertRaises(ValueError, self._makeOne, object()) + + def test_ctor_bad_id_or_name(self): + self.assertRaises(ValueError, self._makeOne, 'KIND', object()) def test__clone(self): - _DATASET = 'DATASET' + _DATASET = 'DATASET-ALT' _NAMESPACE = 'NAMESPACE' _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] 
- key = self._makeOne(_PATH, _NAMESPACE, _DATASET) + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + dataset_id=_DATASET) clone = key._clone() - self.assertEqual(clone._dataset_id, _DATASET) - self.assertEqual(clone.namespace(), _NAMESPACE) - self.assertEqual(clone.kind(), _KIND) - self.assertEqual(clone.path(), _PATH) + self.assertEqual(clone.dataset_id, _DATASET) + self.assertEqual(clone.namespace, _NAMESPACE) + self.assertEqual(clone.kind, _KIND) + self.assertEqual(clone.path, _PATH) + + def test_complete_key_on_partial_w_id(self): + key = self._makeOne('KIND') + _ID = 1234 + new_key = key.complete_key(_ID) + self.assertFalse(key is new_key) + self.assertEqual(new_key.id, _ID) + self.assertEqual(new_key.name, None) + + def test_complete_key_on_partial_w_name(self): + key = self._makeOne('KIND') + _NAME = 'NAME' + new_key = key.complete_key(_NAME) + self.assertFalse(key is new_key) + self.assertEqual(new_key.id, None) + self.assertEqual(new_key.name, _NAME) + + def test_complete_key_on_partial_w_invalid(self): + key = self._makeOne('KIND') + self.assertRaises(ValueError, key.complete_key, object()) + + def test_complete_key_on_complete(self): + key = self._makeOne('KIND', 1234) + self.assertRaises(ValueError, key.complete_key, 5678) + + def test_compare_to_proto_incomplete_w_id(self): + _ID = 1234 + key = self._makeOne('KIND') + pb = key.to_protobuf() + pb.path_element[0].id = _ID + new_key = key.compare_to_proto(pb) + self.assertFalse(new_key is key) + self.assertEqual(new_key.id, _ID) + self.assertEqual(new_key.name, None) + + def test_compare_to_proto_incomplete_w_name(self): + _NAME = 'NAME' + key = self._makeOne('KIND') + pb = key.to_protobuf() + pb.path_element[0].name = _NAME + new_key = key.compare_to_proto(pb) + self.assertFalse(new_key is key) + self.assertEqual(new_key.id, None) + self.assertEqual(new_key.name, _NAME) + + def test_compare_to_proto_incomplete_w_incomplete(self): + # DJH: Should `compare_to_proto` require the pb is complete? 
+ key = self._makeOne('KIND') + pb = key.to_protobuf() + new_key = key.compare_to_proto(pb) + self.assertTrue(new_key is key) + + def test_compare_to_proto_incomplete_w_bad_path(self): + key = self._makeOne('KIND1', 1234, 'KIND2') + pb = key.to_protobuf() + pb.path_element[0].kind = 'NO_KIND' + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_w_id(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.path_element[0].id = 5678 + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_w_name(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.path_element[0].name = 'NAME' + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_w_incomplete(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.path_element[0].ClearField('id') + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_diff_dataset(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.partition_id.dataset_id = 's~' + key.dataset_id + new_key = key.compare_to_proto(pb) + self.assertTrue(new_key is key) + + def test_compare_to_proto_complete_bad_dataset(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.partition_id.dataset_id = 'BAD_PRE~' + key.dataset_id + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_valid_namespace(self): + key = self._makeOne('KIND', 1234, namespace='NAMESPACE') + pb = key.to_protobuf() + new_key = key.compare_to_proto(pb) + self.assertTrue(new_key is key) + + def test_compare_to_proto_complete_namespace_unset_on_pb(self): + key = self._makeOne('KIND', 1234, namespace='NAMESPACE') + pb = key.to_protobuf() + pb.partition_id.ClearField('namespace') + self.assertRaises(ValueError, key.compare_to_proto, pb) + + def test_compare_to_proto_complete_namespace_unset_on_key(self): + key = self._makeOne('KIND', 1234) + pb = key.to_protobuf() + pb.partition_id.namespace = 'NAMESPACE' + self.assertRaises(ValueError, key.compare_to_proto, pb) def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB - key = self._makeOne() + _KIND = 'KIND' + key = self._makeOne(_KIND) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) - self.assertEqual(pb.partition_id.dataset_id, '') + self.assertEqual(pb.partition_id.dataset_id, self._DEFAULT_DATASET) self.assertEqual(pb.partition_id.namespace, '') + self.assertFalse(pb.partition_id.HasField('namespace')) + + # Check the element PB matches the partial key and kind. 
elem, = list(pb.path_element) - self.assertEqual(elem.kind, '') + self.assertEqual(elem.kind, _KIND) self.assertEqual(elem.name, '') + self.assertFalse(elem.HasField('name')) self.assertEqual(elem.id, 0) + self.assertFalse(elem.HasField('id')) - def test_to_protobuf_w_explicit_dataset_no_prefix(self): - _DATASET = 'DATASET' - key = self._makeOne(dataset_id=_DATASET) + def test_to_protobuf_w_explicit_dataset(self): + _DATASET = 'DATASET-ALT' + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne(namespace=_NAMESPACE) + key = self._makeOne('KIND', namespace=_NAMESPACE) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -85,206 +216,77 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - _PATH = [ - {'kind': _PARENT, 'name': _NAME}, - {'kind': _CHILD, 'id': _ID}, - {}, - ] - key = self._makeOne(path=_PATH) + key = self._makeOne(_PARENT, _NAME, _CHILD, _ID) pb = key.to_protobuf() elems = list(pb.path_element) - self.assertEqual(len(elems), len(_PATH)) + self.assertEqual(len(elems), 2) self.assertEqual(elems[0].kind, _PARENT) self.assertEqual(elems[0].name, _NAME) self.assertEqual(elems[1].kind, _CHILD) self.assertEqual(elems[1].id, _ID) - self.assertEqual(elems[2].kind, '') - self.assertEqual(elems[2].name, '') - self.assertEqual(elems[2].id, 0) - - def test_from_path_empty(self): - key = self._getTargetClass().from_path() - self.assertEqual(key._dataset_id, None) - self.assertEqual(key.namespace(), None) - self.assertEqual(key.kind(), '') - self.assertEqual(key.path(), [{'kind': ''}]) - - def test_from_path_single_element(self): - self.assertRaises(ValueError, self._getTargetClass().from_path, 'abc') - - def test_from_path_three_elements(self): - self.assertRaises(ValueError, self._getTargetClass().from_path, - 'abc', 'def', 'ghi') - - def test_from_path_two_elements_second_string(self): - key = self._getTargetClass().from_path('abc', 'def') - self.assertEqual(key.kind(), 'abc') - self.assertEqual(key.path(), [{'kind': 'abc', 'name': 'def'}]) - - def test_from_path_two_elements_second_int(self): - key = self._getTargetClass().from_path('abc', 123) - self.assertEqual(key.kind(), 'abc') - self.assertEqual(key.path(), [{'kind': 'abc', 'id': 123}]) - - def test_from_path_nested(self): - key = self._getTargetClass().from_path('abc', 'def', 'ghi', 123) - self.assertEqual(key.kind(), 'ghi') - expected_path = [ - {'kind': 'abc', 'name': 'def'}, - {'kind': 'ghi', 'id': 123}, - ] - self.assertEqual(key.path(), expected_path) + + def test_to_protobuf_w_no_kind(self): + _DATASET = 'DATASET-ALT' + key = self._makeOne('KIND', dataset_id=_DATASET) + key._path[-1].pop('kind') + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.dataset_id, _DATASET) + # DJH: Should the code fail on this? The backend certainly will. 
+ self.assertFalse(pb.path_element[0].HasField('kind')) def test_is_partial_no_name_or_id(self): - key = self._makeOne() - self.assertTrue(key.is_partial()) + key = self._makeOne('KIND') + self.assertTrue(key.is_partial) def test_is_partial_w_id(self): - _KIND = 'KIND' _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(path=_PATH) - self.assertFalse(key.is_partial()) + key = self._makeOne('KIND', _ID) + self.assertFalse(key.is_partial) def test_is_partial_w_name(self): - _KIND = 'KIND' - _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(path=_PATH) - self.assertFalse(key.is_partial()) - - def test_namespace_setter(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - _KIND = 'KIND' - _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(path=_PATH, dataset_id=_DATASET) - after = key.namespace(_NAMESPACE) - self.assertFalse(after is key) - self.assertTrue(isinstance(after, self._getTargetClass())) - self.assertEqual(after._dataset_id, _DATASET) - self.assertEqual(after.namespace(), _NAMESPACE) - self.assertEqual(after.path(), _PATH) - - def test_path_setter(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - _KIND = 'KIND' _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(namespace=_NAMESPACE, dataset_id=_DATASET) - after = key.path(_PATH) - self.assertFalse(after is key) - self.assertTrue(isinstance(after, self._getTargetClass())) - self.assertEqual(after._dataset_id, _DATASET) - self.assertEqual(after.namespace(), _NAMESPACE) - self.assertEqual(after.path(), _PATH) - - def test_kind_getter_empty_path(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - key = self._makeOne(namespace=_NAMESPACE, dataset_id=_DATASET) - key._path = () # edge case - self.assertEqual(key.kind(), None) - - def test_kind_setter(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - _KIND_BEFORE = 'KIND_BEFORE' - _KIND_AFTER = 'KIND_AFTER' - _NAME = 'NAME' - _PATH = [{'kind': _KIND_BEFORE, 'name': _NAME}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) - after = key.kind(_KIND_AFTER) - self.assertFalse(after is key) - self.assertTrue(isinstance(after, self._getTargetClass())) - self.assertEqual(after._dataset_id, _DATASET) - self.assertEqual(after.namespace(), _NAMESPACE) - self.assertEqual(after.path(), [{'kind': _KIND_AFTER, 'name': _NAME}]) - - def test_id_getter_empty_path(self): - key = self._makeOne() - key._path = () # edge case - self.assertEqual(key.id(), None) - - def test_id_setter(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - _KIND = 'KIND' - _ID_BEFORE = 1234 - _ID_AFTER = 5678 - _PATH = [{'kind': _KIND, 'id': _ID_BEFORE}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) - after = key.id(_ID_AFTER) - self.assertFalse(after is key) - self.assertTrue(isinstance(after, self._getTargetClass())) - self.assertEqual(after._dataset_id, _DATASET) - self.assertEqual(after.namespace(), _NAMESPACE) - self.assertEqual(after.path(), [{'kind': _KIND, 'id': _ID_AFTER}]) - - def test_name_getter_empty_path(self): - key = self._makeOne() - key._path = () # edge case - self.assertEqual(key.name(), None) - - def test_name_setter(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - _KIND = 'KIND' - _NAME_BEFORE = 'NAME_BEFORE' - _NAME_AFTER = 'NAME_AFTER' - _PATH = [{'kind': _KIND, 'name': _NAME_BEFORE}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) - after = key.name(_NAME_AFTER) - self.assertFalse(after is key) - self.assertTrue(isinstance(after, 
self._getTargetClass())) - self.assertEqual(after._dataset_id, _DATASET) - self.assertEqual(after.namespace(), _NAMESPACE) - self.assertEqual(after.path(), [{'kind': _KIND, 'name': _NAME_AFTER}]) + key = self._makeOne('KIND', _NAME) + self.assertFalse(key.is_partial) def test_id_or_name_no_name_or_id(self): - key = self._makeOne() - self.assertEqual(key.id_or_name(), None) + key = self._makeOne('KIND') + self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - _KIND = 'KIND' - _NAME = 'NAME' - _ID = 5678 - _PATH = [{'kind': _KIND, 'id': _ID, 'name': _NAME}, {'kind': ''}] - key = self._makeOne(path=_PATH) - self.assertEqual(key.id_or_name(), None) + key = self._makeOne('KIND1', 1234, 'KIND2') + self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(path=_PATH) - self.assertEqual(key.id_or_name(), _ID) - - def test_id_or_name_w_id_and_name(self): - _KIND = 'KIND' _ID = 1234 - _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'id': _ID, 'name': _NAME}] - key = self._makeOne(path=_PATH) - self.assertEqual(key.id_or_name(), _ID) + key = self._makeOne('KIND', _ID) + self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): - _KIND = 'KIND' _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(path=_PATH) - self.assertEqual(key.id_or_name(), _NAME) + key = self._makeOne('KIND', _NAME) + self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne() - self.assertEqual(key.parent(), None) + key = self._makeOne('KIND') + self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - key = self._getTargetClass().from_path('abc', 'def') - self.assertEqual(key.parent(), None) + key = self._makeOne('KIND', 1234) + self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): - key = self._getTargetClass().from_path('abc', 'def', 'ghi', 123) - self.assertEqual(key.parent().path(), [{'kind': 'abc', 'name': 'def'}]) + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + self.assertEqual(key.parent.path, _PARENT_PATH) + + def test_parent_multiple_calls(self): + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + parent = key.parent + self.assertEqual(parent.path, _PARENT_PATH) + new_parent = key.parent + self.assertTrue(parent is new_parent) diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index e0b2f4702a5a..c627a3e0f588 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -18,8 +18,10 @@ class TestQuery(unittest2.TestCase): def _getTargetClass(self): + from gcloud.datastore import _implicit_environ from gcloud.datastore.query import Query + _implicit_environ.DATASET = None return Query def _makeOne(self, kind=None, dataset=None, namespace=None): @@ -160,16 +162,41 @@ def test_filter_w_whitespace_property_name(self): self.assertEqual(p_pb.value.string_value, u'John') self.assertEqual(p_pb.operator, datastore_pb.PropertyFilter.EQUAL) + def test_filter___key__valid_key(self): + from gcloud.datastore.key import Key + from gcloud.datastore import test_connection + + query = self._makeOne() + key = Key('Foo', dataset_id='DATASET') + new_query = query.filter('__key__', '=', key) + + query_pb = 
new_query._pb + all_filters = query_pb.filter.composite_filter.filter + self.assertEqual(len(all_filters), 1) + + prop_filter = all_filters[0].property_filter + value_fields = prop_filter.value._fields + self.assertEqual(len(value_fields), 1) + field_name, field_value = value_fields.popitem() + self.assertEqual(field_name.name, 'key_value') + + test_connection._compare_key_pb_after_request( + self, key.to_protobuf(), field_value) + + def test_filter___key__invalid_value(self): + query = self._makeOne() + self.assertRaises(TypeError, query.filter, '__key__', '=', None) + def test_ancestor_w_non_key_non_list(self): query = self._makeOne() self.assertRaises(TypeError, query.ancestor, object()) def test_ancestor_wo_existing_ancestor_query_w_key_and_propfilter(self): from gcloud.datastore.key import Key - _KIND = 'KIND' - _ID = 123 + from gcloud.datastore import test_connection + _NAME = u'NAME' - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key('KIND', 123, dataset_id='DATASET') query = self._makeOne().filter('name', '=', _NAME) after = query.ancestor(key) self.assertFalse(after is query) @@ -182,13 +209,14 @@ def test_ancestor_wo_existing_ancestor_query_w_key_and_propfilter(self): self.assertEqual(p_pb.value.string_value, _NAME) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, '__key__') - self.assertEqual(p_pb.value.key_value, key.to_protobuf()) + test_connection._compare_key_pb_after_request( + self, key.to_protobuf(), p_pb.value.key_value) def test_ancestor_wo_existing_ancestor_query_w_key(self): from gcloud.datastore.key import Key - _KIND = 'KIND' - _ID = 123 - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + from gcloud.datastore import test_connection + + key = Key('KIND', 123, dataset_id='DATASET') query = self._makeOne() after = query.ancestor(key) self.assertFalse(after is query) @@ -198,14 +226,23 @@ def test_ancestor_wo_existing_ancestor_query_w_key(self): f_pb, = list(q_pb.filter.composite_filter.filter) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, '__key__') - self.assertEqual(p_pb.value.key_value, key.to_protobuf()) + test_connection._compare_key_pb_after_request( + self, key.to_protobuf(), p_pb.value.key_value) def test_ancestor_wo_existing_ancestor_query_w_list(self): + from gcloud.datastore import _implicit_environ + from gcloud.datastore.dataset import Dataset from gcloud.datastore.key import Key + from gcloud.datastore import test_connection + + # Query doesn't have dataset attached. + query = self._makeOne() + + # All keys will have dataset attached. + _implicit_environ.DATASET = Dataset('DATASET') _KIND = 'KIND' _ID = 123 - key = Key(path=[{'kind': _KIND, 'id': _ID}]) - query = self._makeOne() + key = Key(_KIND, _ID) after = query.ancestor([_KIND, _ID]) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -214,12 +251,20 @@ def test_ancestor_wo_existing_ancestor_query_w_list(self): f_pb, = list(q_pb.filter.composite_filter.filter) p_pb = f_pb.property_filter self.assertEqual(p_pb.property.name, '__key__') - self.assertEqual(p_pb.value.key_value, key.to_protobuf()) + test_connection._compare_key_pb_after_request( + self, key.to_protobuf(), p_pb.value.key_value) def test_ancestor_clears_existing_ancestor_query_w_only(self): + from gcloud.datastore import _implicit_environ + from gcloud.datastore.dataset import Dataset + _KIND = 'KIND' _ID = 123 query = self._makeOne() + + # All keys will have dataset attached. 
+ _implicit_environ.DATASET = Dataset('DATASET') + between = query.ancestor([_KIND, _ID]) after = between.ancestor(None) self.assertFalse(after is query) @@ -228,10 +273,17 @@ def test_ancestor_clears_existing_ancestor_query_w_only(self): self.assertEqual(list(q_pb.filter.composite_filter.filter), []) def test_ancestor_clears_existing_ancestor_query_w_others(self): + from gcloud.datastore import _implicit_environ + from gcloud.datastore.dataset import Dataset + _KIND = 'KIND' _ID = 123 _NAME = u'NAME' query = self._makeOne().filter('name', '=', _NAME) + + # All keys will have dataset attached. + _implicit_environ.DATASET = Dataset('DATASET') + between = query.ancestor([_KIND, _ID]) after = between.ancestor(None) self.assertFalse(after is query) @@ -317,6 +369,7 @@ def _fetch_page_helper(self, cursor=b'\x00', limit=None, _ID = 123 _NAMESPACE = 'NAMESPACE' entity_pb = Entity() + entity_pb.key.partition_id.dataset_id = _DATASET path_element = entity_pb.key.path_element.add() path_element.kind = _KIND path_element.id = _ID @@ -339,7 +392,7 @@ def _fetch_page_helper(self, cursor=b'\x00', limit=None, self.assertEqual(more_results, _MORE_RESULTS) self.assertEqual(len(entities), 1) - self.assertEqual(entities[0].key().path(), + self.assertEqual(entities[0].key().path, [{'kind': _KIND, 'id': _ID}]) limited_query = query if limit is not None: diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py index 7d39ef1429eb..c67860b1c845 100644 --- a/gcloud/datastore/test_transaction.py +++ b/gcloud/datastore/test_transaction.py @@ -38,6 +38,15 @@ def test_ctor(self): self.assertEqual(len(xact._auto_id_entities), 0) self.assertTrue(xact.connection() is connection) + def test_ctor_with_env(self): + SENTINEL_VAL = object() + + from gcloud.datastore import _implicit_environ + _implicit_environ.DATASET = SENTINEL_VAL + + transaction = self._makeOne(dataset=None) + self.assertEqual(transaction.dataset(), SENTINEL_VAL) + def test_add_auto_id_entity(self): entity = _Entity() _DATASET = 'DATASET' @@ -85,7 +94,8 @@ def test_commit_w_auto_ids(self): _KIND = 'KIND' _ID = 123 connection = _Connection(234) - connection._commit_result = _CommitResult(_makeKey(_KIND, _ID)) + connection._commit_result = _CommitResult( + _make_key(_KIND, _ID, _DATASET)) dataset = _Dataset(_DATASET, connection) xact = self._makeOne(dataset) entity = _Entity() @@ -148,10 +158,11 @@ class Foo(Exception): self.assertEqual(xact.id(), None) -def _makeKey(kind, id): +def _make_key(kind, id, dataset_id): from gcloud.datastore.datastore_v1_pb2 import Key key = Key() + key.partition_id.dataset_id = dataset_id elem = key.path_element.add() elem.kind = kind elem.id = id @@ -202,21 +213,11 @@ def __init__(self, *new_keys): self.insert_auto_id_key = new_keys -class _Key(object): - _path = None - - def path(self, path): - self._path = path - return self - - class _Entity(object): - _marker = object() def __init__(self): + from gcloud.datastore.test_entity import _Key self._key = _Key() - def key(self, key=_marker): - if key is self._marker: - return self._key + def key(self, key=None): self._key = key diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index 67de81b7af3c..da8c61395ed1 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -14,11 +14,12 @@ """Create / interact with gcloud datastore transactions.""" +from gcloud.datastore import _implicit_environ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import helpers -class 
Transaction(object): +class Transaction(_implicit_environ._DatastoreBase): """An abstraction representing datastore Transactions. Transactions can be used to build up a bulk mutation as well as @@ -62,8 +63,8 @@ class Transaction(object): >>> with dataset.transaction(): ... entity = dataset.entity('Thing') ... entity.save() - ... assert entity.key().is_partial() # There is no ID on this key. - >>> assert not entity.key().is_partial() # There *is* an ID. + ... assert entity.key().is_partial # There is no ID on this key. + >>> assert not entity.key().is_partial # There *is* an ID. .. warning:: If you're using the automatically generated ID functionality, it's important that you only use @@ -125,8 +126,9 @@ class Transaction(object): :param dataset: The dataset to which this :class:`Transaction` belongs. """ - def __init__(self, dataset): - self._dataset = dataset + def __init__(self, dataset=None): + super(Transaction, self).__init__(dataset=dataset) + # If self._dataset is None, using this transaction will fail. self._id = None self._mutation = datastore_pb.Mutation() self._auto_id_entities = [] @@ -233,7 +235,7 @@ def commit(self): for i, entity in enumerate(self._auto_id_entities): key_pb = result.insert_auto_id_key[i] key = helpers.key_from_protobuf(key_pb) - entity.key(entity.key().path(key.path())) + entity.key(key) # DJH: Need to validate against old key. # Tell the connection that the transaction is over. self.connection().transaction(None) diff --git a/pylintrc_default b/pylintrc_default index 12eb9eb058d2..801da4203c3f 100644 --- a/pylintrc_default +++ b/pylintrc_default @@ -1,4 +1,4 @@ -# PyLint config for 'gcloud' *library* code. +# PyLint config for 'gcloud' *library* code. # # NOTES: # @@ -65,14 +65,14 @@ ignore = # DEFAULT: disable= # RATIONALE: # - maybe-no-member: bi-modal functions confuse pylint type inference. -# - no-member: indirections in protobuf-generated code +# - no-member: indirections in protobuf-generated code # - protected-access: helpers use '_foo' of classes from generated code. # - redefined-builtin: use of 'id', 'type', 'filter' args in API-bound funcs; # use of 'NotImplemented' to map HTTP response code. # - similarities: 'Bucket' and 'Key' define 'metageneration' and 'owner' with # identical implementation but different docstrings. # - star-args: standard Python idioms for varargs: -# ancestor = Key.from_path(*ancestor) +# ancestor = Key(*key_args) disable = maybe-no-member, no-member, @@ -201,7 +201,7 @@ max-module-lines=1500 # Good variable names which should always be accepted, separated by a comma # DEFAULT: good-names=i,j,k,ex,Run,_ -# RATIONALE: 'pb' and 'id' have well-understood meainings in the code. +# RATIONALE: 'pb' and 'id' have well-understood meanings in the code. good-names = i, j, k, ex, Run, _, pb, id, diff --git a/regression/clear_datastore.py b/regression/clear_datastore.py index ba3efedd55da..ecc75ada54b4 100644 --- a/regression/clear_datastore.py +++ b/regression/clear_datastore.py @@ -45,7 +45,7 @@ def fetch_keys(dataset, kind, fetch_max=FETCH_MAX, query=None, cursor=None): def get_ancestors(entities): # NOTE: A key will always have at least one path element. - key_roots = [entity.key().path()[0] for entity in entities] + key_roots = [entity.key().path[0] for entity in entities] # Turn into hashable type so we can use set to get unique roots. # Also sort the items() to ensure uniqueness. 
key_roots = [tuple(sorted(root.items())) for root in key_roots] diff --git a/regression/datastore.py b/regression/datastore.py index 79f9eea476d8..8966fd297ad3 100644 --- a/regression/datastore.py +++ b/regression/datastore.py @@ -17,23 +17,20 @@ import unittest2 from gcloud import datastore +datastore._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' +datastore._set_dataset_from_environ() # This assumes the command is being run via tox hence the # repository root is the current directory. from regression import populate_datastore -from regression import regression_utils class TestDatastore(unittest2.TestCase): - @classmethod - def setUpClass(cls): - cls.dataset = regression_utils.get_dataset() - def setUp(self): self.case_entities_to_delete = [] def tearDown(self): - with self.dataset.transaction(): + with datastore.transaction.Transaction(): for entity in self.case_entities_to_delete: entity.delete() @@ -41,16 +38,18 @@ def tearDown(self): class TestDatastoreAllocateIDs(TestDatastore): def test_allocate_ids(self): - incomplete_key = datastore.key.Key(path=[{'kind': 'Kind'}]) - allocated_keys = self.dataset.allocate_ids(incomplete_key, 10) - self.assertEqual(len(allocated_keys), 10) + incomplete_key = datastore.key.Key('Kind') + num_ids = 10 + allocated_keys = datastore.allocate_ids(incomplete_key, num_ids) + self.assertEqual(len(allocated_keys), num_ids) unique_ids = set() for key in allocated_keys: - unique_ids.add(key.id()) - self.assertFalse(key.is_partial()) + unique_ids.add(key.id) + self.assertEqual(key.name, None) + self.assertNotEqual(key.id, None) - self.assertEqual(len(unique_ids), 10) + self.assertEqual(len(unique_ids), num_ids) class TestDatastoreSave(TestDatastore): @@ -65,16 +64,16 @@ def _get_post(self, name=None, key_id=None, post_content=None): 'wordCount': 400, 'rating': 5.0, } - # Create an entity with the given content in our dataset. - entity = self.dataset.entity(kind='Post') - entity.update(post_content) + # Create an entity with the given content. + entity = datastore.entity.Entity(kind='Post') + entity.update_properties(post_content) # Update the entity key. key = None if name is not None: - key = entity.key().name(name) + key = entity.key().complete_key(name) if key_id is not None: - key = entity.key().id(key_id) + key = entity.key().complete_key(key_id) if key is not None: entity.key(key) @@ -88,19 +87,17 @@ def _generic_test_post(self, name=None, key_id=None): self.case_entities_to_delete.append(entity) if name is not None: - self.assertEqual(entity.key().name(), name) + self.assertEqual(entity.key().name, name) if key_id is not None: - self.assertEqual(entity.key().id(), key_id) - retrieved_entity = self.dataset.get_entity(entity.key()) + self.assertEqual(entity.key().id, key_id) + retrieved_entity = datastore.get_entity(entity.key()) # Check the keys are the same. - self.assertEqual(retrieved_entity.key().path(), entity.key().path()) - self.assertEqual(retrieved_entity.key().namespace(), - entity.key().namespace()) + self.assertEqual(retrieved_entity.key().path, entity.key().path) + self.assertEqual(retrieved_entity.key().namespace, + entity.key().namespace) # Check the data is the same. 
- retrieved_dict = dict(retrieved_entity.items()) - entity_dict = dict(entity.items()) - self.assertEqual(retrieved_dict, entity_dict) + self.assertEqual(retrieved_entity.to_dict(), entity.to_dict()) def test_post_with_name(self): self._generic_test_post(name='post1') @@ -112,7 +109,7 @@ def test_post_with_generated_id(self): self._generic_test_post() def test_save_multiple(self): - with self.dataset.transaction(): + with datastore.transaction.Transaction(): entity1 = self._get_post() entity1.save() # Register entity to be deleted. @@ -133,26 +130,26 @@ def test_save_multiple(self): self.case_entities_to_delete.append(entity2) keys = [entity1.key(), entity2.key()] - matches = self.dataset.get_entities(keys) + matches = datastore.get_entities(keys) self.assertEqual(len(matches), 2) def test_empty_kind(self): - posts = self.dataset.query('Post').limit(2).fetch() + posts = datastore.query.Query(kind='Post').limit(2).fetch() self.assertEqual(posts, []) class TestDatastoreSaveKeys(TestDatastore): def test_save_key_self_reference(self): - key = datastore.key.Key.from_path('Person', 'name') - entity = self.dataset.entity(kind=None).key(key) + key = datastore.key.Key('Person', 'name') + entity = datastore.entity.Entity(kind=None).key(key) entity['fullName'] = u'Full name' entity['linkedTo'] = key # Self reference. entity.save() self.case_entities_to_delete.append(entity) - query = self.dataset.query('Person').filter( + query = datastore.query.Query(kind='Person').filter( 'linkedTo', '=', key).limit(2) stored_persons = query.fetch() @@ -160,8 +157,8 @@ def test_save_key_self_reference(self): stored_person = stored_persons[0] self.assertEqual(stored_person['fullName'], entity['fullName']) - self.assertEqual(stored_person.key().path(), key.path()) - self.assertEqual(stored_person.key().namespace(), key.namespace()) + self.assertEqual(stored_person.key().path, key.path) + self.assertEqual(stored_person.key().namespace, key.namespace) class TestDatastoreQuery(TestDatastore): @@ -170,11 +167,11 @@ class TestDatastoreQuery(TestDatastore): def setUpClass(cls): super(TestDatastoreQuery, cls).setUpClass() cls.CHARACTERS = populate_datastore.CHARACTERS - cls.ANCESTOR_KEY = datastore.key.Key( - path=[populate_datastore.ANCESTOR]) + cls.ANCESTOR_KEY = datastore.key.Key(*populate_datastore.ANCESTOR) def _base_query(self): - return self.dataset.query('Character').ancestor(self.ANCESTOR_KEY) + return datastore.query.Query(kind='Character').ancestor( + self.ANCESTOR_KEY) def test_limit_queries(self): limit = 5 @@ -217,8 +214,7 @@ def test_ancestor_query(self): self.assertEqual(len(entities), expected_matches) def test_query___key___filter(self): - rickard_key = datastore.key.Key( - path=[populate_datastore.ANCESTOR, populate_datastore.RICKARD]) + rickard_key = datastore.key.Key(*populate_datastore.RICKARD) query = self._base_query().filter('__key__', '=', rickard_key) expected_matches = 1 @@ -249,32 +245,30 @@ def test_projection_query(self): self.assertEqual(len(entities), expected_matches) arya_entity = entities[0] - arya_dict = dict(arya_entity.items()) - self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'}) + self.assertEqual(arya_entity.to_dict(), + {'name': 'Arya', 'family': 'Stark'}) catelyn_stark_entity = entities[2] - catelyn_stark_dict = dict(catelyn_stark_entity.items()) - self.assertEqual(catelyn_stark_dict, + self.assertEqual(catelyn_stark_entity.to_dict(), {'name': 'Catelyn', 'family': 'Stark'}) catelyn_tully_entity = entities[3] - catelyn_tully_dict = dict(catelyn_tully_entity.items()) - 
self.assertEqual(catelyn_tully_dict, + self.assertEqual(catelyn_tully_entity.to_dict(), {'name': 'Catelyn', 'family': 'Tully'}) # Check both Catelyn keys are the same. catelyn_stark_key = catelyn_stark_entity.key() catelyn_tully_key = catelyn_tully_entity.key() - self.assertEqual(catelyn_stark_key.path(), catelyn_tully_key.path()) - self.assertEqual(catelyn_stark_key.namespace(), - catelyn_tully_key.namespace()) + self.assertEqual(catelyn_stark_key.path, catelyn_tully_key.path) + self.assertEqual(catelyn_stark_key.namespace, + catelyn_tully_key.namespace) # Also check the _dataset_id since both retrieved from datastore. - self.assertEqual(catelyn_stark_key._dataset_id, - catelyn_tully_key._dataset_id) + self.assertEqual(catelyn_stark_key.dataset_id, + catelyn_tully_key.dataset_id) sansa_entity = entities[8] - sansa_dict = dict(sansa_entity.items()) - self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'}) + self.assertEqual(sansa_entity.to_dict(), + {'name': 'Sansa', 'family': 'Stark'}) def test_query_paginate_with_offset(self): query = self._base_query() @@ -334,19 +328,17 @@ def test_query_group_by(self): class TestDatastoreTransaction(TestDatastore): def test_transaction(self): - key = datastore.key.Key.from_path('Company', 'Google') - entity = self.dataset.entity(kind=None).key(key) + key = datastore.key.Key('Company', 'Google') + entity = datastore.entity.Entity(kind=None).key(key) entity['url'] = u'www.google.com' - with self.dataset.transaction(): - retrieved_entity = self.dataset.get_entity(key) + with datastore.transaction.Transaction(): + retrieved_entity = datastore.get_entity(key) if retrieved_entity is None: entity.save() self.case_entities_to_delete.append(entity) # This will always return after the transaction. - retrieved_entity = self.dataset.get_entity(key) - retrieved_dict = dict(retrieved_entity.items()) - entity_dict = dict(entity.items()) - self.assertEqual(retrieved_dict, entity_dict) + retrieved_entity = datastore.get_entity(key) + self.assertEqual(retrieved_entity.to_dict(), entity.to_dict()) retrieved_entity.delete() diff --git a/regression/populate_datastore.py b/regression/populate_datastore.py index 62ed9053d945..4cdfc36ee5e1 100644 --- a/regression/populate_datastore.py +++ b/regression/populate_datastore.py @@ -22,24 +22,18 @@ from regression import regression_utils -ANCESTOR = {'kind': 'Book', 'name': 'GoT'} -RICKARD = {'kind': 'Character', 'name': 'Rickard'} -EDDARD = {'kind': 'Character', 'name': 'Eddard'} +ANCESTOR = ('Book', 'GoT') +RICKARD = ANCESTOR + ('Character', 'Rickard') +EDDARD = RICKARD + ('Character', 'Eddard') KEY_PATHS = [ - [ANCESTOR, RICKARD], - [ANCESTOR, RICKARD, EDDARD], - [ANCESTOR, - {'kind': 'Character', 'name': 'Catelyn'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Arya'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Sansa'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Robb'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Bran'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Jon Snow'}], + RICKARD, + EDDARD, + ANCESTOR + ('Character', 'Catelyn'), + EDDARD + ('Character', 'Arya'), + EDDARD + ('Character', 'Sansa'), + EDDARD + ('Character', 'Robb'), + EDDARD + ('Character', 'Bran'), + EDDARD + ('Character', 'Jon Snow'), ] CHARACTERS = [ { @@ -90,12 +84,12 @@ def add_characters(): dataset = regression_utils.get_dataset() with dataset.transaction(): for key_path, character in zip(KEY_PATHS, CHARACTERS): - if key_path[-1]['name'] != 
character['name']: + if key_path[-1] != character['name']: raise ValueError(('Character and key don\'t agree', key_path, character)) - key = datastore.key.Key(path=key_path) + key = datastore.key.Key(*key_path, dataset_id=dataset.id()) entity = datastore.entity.Entity(dataset=dataset).key(key) - entity.update(character) + entity.update_properties(character) entity.save() print('Adding Character %s %s' % (character['name'], character['family']))
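The rewritten populate_datastore.py rounds out the flat-path `Key` API this patch adopts everywhere: ancestor paths are plain tuples, so they compose by concatenation and splat straight into `Key(*key_path, dataset_id=...)`, and `key_path[-1]` is now the entity's name itself rather than a `{'kind': ..., 'name': ...}` dict, hence the simplified sanity check in `add_characters`. A short usage sketch under the same assumptions (the dataset id here is illustrative):

    from gcloud.datastore.key import Key

    ANCESTOR = ('Book', 'GoT')
    RICKARD = ANCESTOR + ('Character', 'Rickard')

    # Positional arguments alternate kind, id-or-name, kind, id-or-name.
    key = Key(*RICKARD, dataset_id='my-dataset-id')
    assert key.path == [{'kind': 'Book', 'name': 'GoT'},
                        {'kind': 'Character', 'name': 'Rickard'}]

    # ``path`` and ``parent`` are read-only properties now, not methods.
    assert key.parent.path == [{'kind': 'Book', 'name': 'GoT'}]

    # A trailing kind with no id or name yields a partial key, which can
    # be completed later (e.g. via allocate_ids or complete_key).
    partial = Key('Character', dataset_id='my-dataset-id')
    assert partial.is_partial and partial.id_or_name is None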