Skip to content

Commit

Permalink
Making cached dataset ID require explicit interaction.
Browse files Browse the repository at this point in the history
Incorporates feedback from @tseaver. Also
- Fixing bad tests that accidentally leave side-effects.
- Making the method to set the default dataset public.
  • Loading branch information
dhermes committed Dec 23, 2014
1 parent d85d200 commit 7cd77f2
Show file tree
Hide file tree
Showing 6 changed files with 60 additions and 66 deletions.
27 changes: 15 additions & 12 deletions gcloud/datastore/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,19 +58,26 @@
_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'


def _set_dataset_from_environ():
def set_default_dataset(dataset_id=None):
"""Determines auth settings from local environment.
Currently only supports the environment variable but will implicitly
support App Engine, Compute Engine and other environments in
the future.
Sets a default dataset either explicitly or via the local
environment. Currently only supports the environment variable but
will implicitly support App Engine, Compute Engine and other
environments in the future.
Local environment variable used is:
- GCLOUD_DATASET_ID
:type dataset_id: :class:`str`.
:param dataset_id: Optional. The dataset ID to use for the default
dataset.
"""
local_dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
if local_dataset_id is not None:
_implicit_environ.DATASET = get_dataset(local_dataset_id)
if dataset_id is None:
dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)

if dataset_id is not None:
_implicit_environ.DATASET = get_dataset(dataset_id)


def get_connection():
Expand Down Expand Up @@ -127,7 +134,7 @@ def _require_dataset():
:raises: :class:`EnvironmentError` if DATASET is not set.
"""
if _implicit_environ.DATASET is None:
raise EnvironmentError('Dataset could not be implied.')
raise EnvironmentError('Dataset could not be inferred.')
return _implicit_environ.DATASET


Expand Down Expand Up @@ -168,7 +175,3 @@ def allocate_ids(incomplete_key, num_ids):
:return: The (complete) keys allocated with `incomplete_key` as root.
"""
return _require_dataset().allocate_ids(incomplete_key, num_ids)


# Set DATASET if it can be implied from the environment.
_set_dataset_from_environ()
2 changes: 1 addition & 1 deletion gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ class Entity(dict):

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
super(Entity, self).__init__()
# Does not inherit from object, so we don't use
# Does not inherit directly from object, so we don't use
# _implicit_environ._DatastoreBase to avoid split MRO.
self._dataset = dataset or _implicit_environ.DATASET
if kind:
Expand Down
66 changes: 21 additions & 45 deletions gcloud/datastore/test___init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,22 @@ def test_it(self):
self.assertTrue(client._get_app_default_called)


class Test__set_dataset_from_environ(unittest2.TestCase):
class Test_set_default_dataset(unittest2.TestCase):

def _callFUT(self):
from gcloud.datastore import _set_dataset_from_environ
return _set_dataset_from_environ()
def setUp(self):
from gcloud.datastore import _implicit_environ
self._replaced_dataset = _implicit_environ.DATASET
_implicit_environ.DATASET = None

def tearDown(self):
from gcloud.datastore import _implicit_environ
_implicit_environ.DATASET = self._replaced_dataset

def _test_with_environ(self, environ, expected_result):
def _callFUT(self, dataset_id=None):
from gcloud.datastore import set_default_dataset
return set_default_dataset(dataset_id=dataset_id)

def _test_with_environ(self, environ, expected_result, dataset_id=None):
import os
from gcloud._testing import _Monkey
from gcloud import datastore
Expand All @@ -53,12 +62,12 @@ def _test_with_environ(self, environ, expected_result):
def custom_getenv(key):
return environ.get(key)

def custom_get_dataset(dataset_id):
return dataset_id
def custom_get_dataset(local_dataset_id):
return local_dataset_id

with _Monkey(os, getenv=custom_getenv):
with _Monkey(datastore, get_dataset=custom_get_dataset):
self._callFUT()
self._callFUT(dataset_id=dataset_id)

self.assertEqual(_implicit_environ.DATASET, expected_result)

Expand All @@ -75,6 +84,10 @@ def test_set_from_env_var(self):
def test_no_env_var_set(self):
self._test_with_environ({}, None)

def test_set_explicit(self):
DATASET_ID = 'DATASET'
self._test_with_environ({}, DATASET_ID, dataset_id=DATASET_ID)


class Test_get_dataset(unittest2.TestCase):

Expand Down Expand Up @@ -161,40 +174,3 @@ def test_allocate_ids(self):

# Check the IDs returned.
self.assertEqual([key.id() for key in result], range(1, NUM_IDS + 1))

def test_set_DATASET(self):
import os
from gcloud._testing import _Monkey
from gcloud.test_credentials import _Client
from gcloud import credentials
from gcloud.datastore import _implicit_environ

# Make custom client for doing auth. Have to fake auth since we
# can't monkey patch `datastore.get_dataset` while reloading the
# `datastore.__init__` module.
client = _Client()

# Fake auth variables.
DATASET = 'dataset'

# Make a custom getenv function to Monkey.
VALUES = {
'GCLOUD_DATASET_ID': DATASET,
}

def custom_getenv(key):
return VALUES.get(key)

# Perform the import again with our test patches.
with _Monkey(credentials, client=client):
with _Monkey(os, getenv=custom_getenv):
import gcloud.datastore
reload(gcloud.datastore)

# Check that the DATASET was correctly implied from the environ.
implicit_dataset = _implicit_environ.DATASET
self.assertEqual(implicit_dataset.id(), DATASET)
# Check that the credentials on the implicit DATASET was set on the
# fake client.
cnxn_credentials = implicit_dataset.connection().credentials
self.assertTrue(cnxn_credentials is client._signed)
12 changes: 9 additions & 3 deletions gcloud/datastore/test_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,18 @@ class Test_entity_from_protobuf(unittest2.TestCase):

_MARKER = object()

def _callFUT(self, val, dataset=_MARKER):
def setUp(self):
from gcloud.datastore import _implicit_environ
from gcloud.datastore.helpers import entity_from_protobuf

self._replaced_dataset = _implicit_environ.DATASET
_implicit_environ.DATASET = None

def tearDown(self):
from gcloud.datastore import _implicit_environ
_implicit_environ.DATASET = self._replaced_dataset

def _callFUT(self, val, dataset=_MARKER):
from gcloud.datastore.helpers import entity_from_protobuf

if dataset is self._MARKER:
return entity_from_protobuf(val)

Expand Down
12 changes: 9 additions & 3 deletions gcloud/datastore/test_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,17 @@

class TestQuery(unittest2.TestCase):

def _getTargetClass(self):
def setUp(self):
from gcloud.datastore import _implicit_environ
from gcloud.datastore.query import Query

self._replaced_dataset = _implicit_environ.DATASET
_implicit_environ.DATASET = None

def tearDown(self):
from gcloud.datastore import _implicit_environ
_implicit_environ.DATASET = self._replaced_dataset

def _getTargetClass(self):
from gcloud.datastore.query import Query
return Query

def _makeOne(self, kind=None, dataset=None, namespace=None):
Expand Down
7 changes: 5 additions & 2 deletions regression/datastore.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,20 @@
# limitations under the License.

import datetime
import os
import pytz
import unittest2

from gcloud import datastore
datastore._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID'
datastore._set_dataset_from_environ()
# This assumes the command is being run via tox hence the
# repository root is the current directory.
from regression import populate_datastore


DATASET_ID = os.getenv('GCLOUD_TESTS_DATASET_ID')
datastore.set_default_dataset(dataset_id=DATASET_ID)


class TestDatastore(unittest2.TestCase):

def setUp(self):
Expand Down

0 comments on commit 7cd77f2

Please sign in to comment.