Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix 3 and 451 #454

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 75 additions & 0 deletions gcloud/datastore/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,14 +44,34 @@
which represents a lookup or search over the rows in the datastore.
"""

import os

from gcloud import credentials
from gcloud.datastore import _implicit_environ
from gcloud.datastore.connection import Connection


SCOPE = ('https://www.googleapis.com/auth/datastore ',
'https://www.googleapis.com/auth/userinfo.email')
"""The scope required for authenticating as a Cloud Datastore consumer."""

_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'


def _set_dataset_from_environ():
    """Determine auth settings from the local environment.

    Currently only supports the ``GCLOUD_DATASET_ID`` environment
    variable but will implicitly support App Engine, Compute Engine
    and other environments in the future.

    Local environment variable used is:
    - GCLOUD_DATASET_ID

    Side effect: sets ``_implicit_environ.DATASET`` when the variable
    is present; leaves it untouched otherwise.
    """
    local_dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
    if local_dataset_id is not None:
        # Only bind the implicit dataset when the env var is actually set;
        # an unset variable must not clobber a previously implied dataset.
        _implicit_environ.DATASET = get_dataset(local_dataset_id)


def get_connection():
"""Shortcut method to establish a connection to the Cloud Datastore.
Expand Down Expand Up @@ -97,3 +117,58 @@ def get_dataset(dataset_id):
"""
connection = get_connection()
return connection.dataset(dataset_id)


def _require_dataset():
    """Convenience method to ensure DATASET is set.

    :rtype: :class:`gcloud.datastore.dataset.Dataset`
    :returns: A dataset based on the current environment.
    :raises: :class:`EnvironmentError` if DATASET is not set.
    """
    implied_dataset = _implicit_environ.DATASET
    if implied_dataset is None:
        raise EnvironmentError('Dataset could not be implied.')
    return implied_dataset


def get_entity(key):
    """Retrieve an entity from the implicit dataset, with its attributes.

    :type key: :class:`gcloud.datastore.key.Key`
    :param key: The key of the entity to retrieve.

    :rtype: :class:`gcloud.datastore.entity.Entity` or ``None``
    :return: The requested entity, or ``None`` if there was no match found.
    """
    dataset = _require_dataset()
    return dataset.get_entity(key)


def get_entities(keys):
    """Retrieve entities from the implied dataset, with their attributes.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys of the entities to retrieve.

    :rtype: list of :class:`gcloud.datastore.entity.Entity`
    :return: The requested entities.
    """
    dataset = _require_dataset()
    return dataset.get_entities(keys)


def allocate_ids(incomplete_key, num_ids):
    """Allocate a list of IDs from a partial key.

    :type incomplete_key: :class:`gcloud.datastore.key.Key`
    :param incomplete_key: The partial key to use as base for allocated IDs.

    :type num_ids: :class:`int`
    :param num_ids: The number of IDs to allocate.

    :rtype: list of :class:`gcloud.datastore.key.Key`
    :return: The (complete) keys allocated with `incomplete_key` as root.
    """
    dataset = _require_dataset()
    return dataset.allocate_ids(incomplete_key, num_ids)


# Set DATASET if it can be implied from the environment
# (currently via the GCLOUD_DATASET_ID environment variable).
# NOTE: this runs as an import-time side effect of the package.
_set_dataset_from_environ()
24 changes: 24 additions & 0 deletions gcloud/datastore/_implicit_environ.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""Module to provide implicit behavior based on enviroment.

Acts as a mutable namespace to allow the datastore package to
imply the current dataset from the enviroment.

Also provides a base class for classes in the `datastore` package
which could utilize the implicit enviroment.
"""


DATASET = None
"""Module global to allow persistent implied dataset from enviroment."""


class _DatastoreBase(object):
    """Base for all classes in the datastore package.

    Uses the implicit DATASET object as a default dataset attached
    to the instances being created. Stores the dataset passed in
    on the protected (i.e. non-public) attribute `_dataset`.
    """

    def __init__(self, dataset=None):
        # NOTE: `or` means any falsy `dataset` argument (not just None)
        # falls back to the module-level DATASET; real Dataset objects
        # are presumably always truthy — confirm if that ever changes.
        self._dataset = dataset or DATASET
17 changes: 9 additions & 8 deletions gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs,
if single_key:
key_pbs = [key_pbs]

for key_pb in key_pbs:
lookup_request.key.add().CopyFrom(key_pb)
helpers._add_keys_to_request(lookup_request.key, key_pbs)

results, missing_found, deferred_found = self._lookup(
lookup_request, dataset_id, deferred is not None)
Expand Down Expand Up @@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs):
:returns: An equal number of keys, with IDs filled in by the backend.
"""
request = datastore_pb.AllocateIdsRequest()
for key_pb in key_pbs:
request.key.add().CopyFrom(key_pb)
helpers._add_keys_to_request(request.key, key_pbs)
# Nothing to do with this response, so just execute the method.
response = self._rpc(dataset_id, 'allocateIds', request,
datastore_pb.AllocateIdsResponse)
Expand All @@ -444,8 +442,14 @@ def save_entity(self, dataset_id, key_pb, properties,
:type exclude_from_indexes: sequence of str
:param exclude_from_indexes: Names of properties *not* to be indexed.
:rtype: bool or :class:`gcloud.datastore.datastore_v1_pb2.Key`
:returns: True if the save succeeds, unless a new ID has been
automatically allocated. In the auto ID case, the newly
created key protobuf is returned.
"""
mutation = self.mutation()
key_pb = helpers._prepare_key_for_request(key_pb)

# If the Key is complete, we should upsert
# instead of using insert_auto_id.
Expand Down Expand Up @@ -506,10 +510,7 @@ def delete_entities(self, dataset_id, key_pbs):
:returns: True
"""
mutation = self.mutation()

for key_pb in key_pbs:
delete = mutation.delete.add()
delete.CopyFrom(key_pb)
helpers._add_keys_to_request(mutation.delete, key_pbs)

if not self.transaction():
self.commit(dataset_id, mutation)
Expand Down
6 changes: 3 additions & 3 deletions gcloud/datastore/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def get_entity(self, key_or_path):
if isinstance(key_or_path, Key):
entities = self.get_entities([key_or_path])
else:
key = Key.from_path(*key_or_path)
key = Key(*key_or_path)
entities = self.get_entities([key])

if entities:
Expand Down Expand Up @@ -196,7 +196,7 @@ def allocate_ids(self, incomplete_key, num_ids):
:return: The (complete) keys allocated with `incomplete_key` as root.
:raises: `ValueError` if `incomplete_key` is not a partial key.
"""
if not incomplete_key.is_partial():
if not incomplete_key.is_partial:
raise ValueError(('Key is not partial.', incomplete_key))

incomplete_key_pb = incomplete_key.to_protobuf()
Expand All @@ -206,5 +206,5 @@ def allocate_ids(self, incomplete_key, num_ids):
self.id(), incomplete_key_pbs)
allocated_ids = [allocated_key_pb.path_element[-1].id
for allocated_key_pb in allocated_key_pbs]
return [incomplete_key.id(allocated_id)
return [incomplete_key.complete_key(allocated_id)
for allocated_id in allocated_ids]
94 changes: 64 additions & 30 deletions gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

"""Class for representing a single entity in the Cloud Datastore."""

from gcloud.datastore import _implicit_environ
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore.key import Key

Expand All @@ -26,7 +27,7 @@ class NoDataset(RuntimeError):
"""Exception raised by Entity methods which require a dataset."""


class Entity(dict):
class Entity(_implicit_environ._DatastoreBase):
"""Entities are akin to rows in a relational database

An entity storing the actual instance of data.
Expand All @@ -41,9 +42,9 @@ class Entity(dict):
Entities in this API act like dictionaries with extras built in that
allow you to delete or persist the data stored on the entity.

Entities are mutable and act like a subclass of a dictionary.
This means you could take an existing entity and change the key
to duplicate the object.
Entities are mutable and properties can be set, updated and deleted
like keys in a dictionary. This means you could take an existing entity
and change the key to duplicate the object.

Use :func:`gcloud.datastore.dataset.Dataset.get_entity`
to retrieve an existing entity.
Expand All @@ -59,10 +60,9 @@ class Entity(dict):
>>> entity
<Entity[{'kind': 'EntityKind', id: 1234}] {'age': 20, 'name': 'JJ'}>

And you can convert an entity to a regular Python dictionary with the
`dict` builtin:
And you can convert an entity to a regular Python dictionary

>>> dict(entity)
>>> entity.to_dict()
{'age': 20, 'name': 'JJ'}

.. note::
Expand Down Expand Up @@ -94,14 +94,60 @@ class Entity(dict):
"""

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
super(Entity, self).__init__()
self._dataset = dataset
super(Entity, self).__init__(dataset=dataset)
self._data = {}
if kind:
self._key = Key().kind(kind)
# This is temporary since the dataset will eventually be 100%
# removed from the Entity and the Dataset class may be
# destroyed.
self._key = Key(kind, dataset_id=self.dataset().id())
else:
self._key = None
self._exclude_from_indexes = set(exclude_from_indexes)

def __getitem__(self, item_name):
    """Return the value of the named property; raises ``KeyError`` if absent."""
    return self._data[item_name]

def __setitem__(self, item_name, value):
    """Set (or overwrite) the named property to ``value``."""
    self._data[item_name] = value

def __delitem__(self, item_name):
    """Remove the named property; raises ``KeyError`` if absent."""
    del self._data[item_name]

def clear_properties(self):
    """Clear all properties from the Entity.

    Mutates the underlying property dict in place (via ``dict.clear``)
    rather than rebinding it.
    """
    self._data.clear()

def update_properties(self, *args, **kwargs):
    """Update entity properties in bulk.

    Accepts either a single dictionary positional argument or keyword
    arguments — never both, and never more than one positional.

    >>> entity
    <Entity[{'kind': 'Foo', 'id': 1}] {}>
    >>> entity.update_properties(prop1=u'bar', prop2=u'baz')
    >>> entity
    <Entity[{'kind': 'Foo', 'id': 1}] {'prop1': u'bar', 'prop2': u'baz'}>
    >>> entity.update_properties({'prop1': 0, 'prop2': 1})
    >>> entity
    <Entity[{'kind': 'Foo', 'id': 1}] {'prop1': 0, 'prop2': 1}>

    :raises: `TypeError` if positional and keyword arguments are mixed
             or if more than one positional argument is used.
    """
    mixed_usage = bool(args) and bool(kwargs)
    if mixed_usage or len(args) > 1:
        raise TypeError('Only a single dictionary or keyword arguments '
                        'may be used')
    # Exactly one positional dict, or else the keyword mapping.
    source = args[0] if args else kwargs
    self._data.update(source)

def to_dict(self):
    """Return the stored properties as a plain (shallow-copied) dict."""
    return dict(self._data)

def dataset(self):
"""Get the :class:`.dataset.Dataset` in which this entity belongs.

Expand Down Expand Up @@ -150,7 +196,7 @@ def kind(self):
"""

if self._key:
return self._key.kind()
return self._key.kind

def exclude_from_indexes(self):
"""Names of fields which are *not* to be indexed for this entity.
Expand Down Expand Up @@ -215,7 +261,7 @@ def reload(self):
entity = dataset.get_entity(key.to_protobuf())

if entity:
self.update(entity)
self.update_properties(entity.to_dict())
return self

def save(self):
Expand All @@ -241,29 +287,18 @@ def save(self):
key_pb = connection.save_entity(
dataset_id=dataset.id(),
key_pb=key.to_protobuf(),
properties=dict(self),
properties=self.to_dict(),
exclude_from_indexes=self.exclude_from_indexes())

# If we are in a transaction and the current entity needs an
# automatically assigned ID, tell the transaction where to put that.
transaction = connection.transaction()
if transaction and key.is_partial():
if transaction and key.is_partial:
transaction.add_auto_id_entity(self)

if isinstance(key_pb, datastore_pb.Key):
# Update the path (which may have been altered).
# NOTE: The underlying namespace can't have changed in a save().
# The value of the dataset ID may have changed from implicit
# (i.e. None, with the ID implied from the dataset.Dataset
# object associated with the Entity/Key), but if it was
# implicit before the save() we leave it as implicit.
path = []
for element in key_pb.path_element:
key_part = {}
for descriptor, value in element._fields.items():
key_part[descriptor.name] = value
path.append(key_part)
self._key = key.path(path)
# Update the key (which may have been altered).
self._key = self.key().compare_to_proto(key_pb)

return self

Expand All @@ -284,7 +319,6 @@ def delete(self):

def __repr__(self):
if self._key:
return '<Entity%s %s>' % (self._key.path(),
super(Entity, self).__repr__())
return '<Entity%s %r>' % (self._key.path, self._data)
else:
return '<Entity %s>' % (super(Entity, self).__repr__())
return '<Entity %r>' % (self._data,)
Loading