diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index 5b6d2d9815bb..c9cb11020b07 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -18,9 +18,10 @@ >>> from gcloud import datastore ->>> key = datastore.Key('EntityKind', 1234) +>>> client = datastore.Client() +>>> key = client.key('EntityKind', 1234) >>> entity = datastore.Entity(key) ->>> query = datastore.Query(kind='EntityKind') +>>> query = client.query(kind='EntityKind') The main concepts with this API are: @@ -49,11 +50,6 @@ when race conditions may occur. """ -from gcloud.datastore._implicit_environ import get_connection -from gcloud.datastore._implicit_environ import get_default_connection -from gcloud.datastore._implicit_environ import get_default_dataset_id -from gcloud.datastore._implicit_environ import set_default_connection -from gcloud.datastore._implicit_environ import set_default_dataset_id from gcloud.datastore.batch import Batch from gcloud.datastore.connection import SCOPE from gcloud.datastore.connection import Connection @@ -62,25 +58,3 @@ from gcloud.datastore.key import Key from gcloud.datastore.query import Query from gcloud.datastore.transaction import Transaction - - -def set_defaults(dataset_id=None, connection=None): - """Set defaults either explicitly or implicitly as fall-back. - - Uses the arguments to call the individual default methods - - - set_default_dataset_id - - set_default_connection - - In the future we will likely enable methods like - - - set_default_namespace - - :type dataset_id: string - :param dataset_id: Optional. The dataset ID to use as default. - - :type connection: :class:`gcloud.datastore.connection.Connection` - :param connection: A connection provided to be the default. - """ - set_default_dataset_id(dataset_id=dataset_id) - set_default_connection(connection=connection) diff --git a/gcloud/datastore/_implicit_environ.py b/gcloud/datastore/_implicit_environ.py deleted file mode 100644 index 7dae6554cb67..000000000000 --- a/gcloud/datastore/_implicit_environ.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Module to provide implicit behavior based on enviroment. - -Allows the datastore package to infer the current dataset ID and -connection from the enviroment. 
-""" - -import os - -from gcloud._helpers import _app_engine_id -from gcloud._helpers import _compute_engine_id -from gcloud._helpers import _lazy_property_deco -from gcloud.datastore.connection import Connection - - -_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID' -_GCD_DATASET_ENV_VAR_NAME = 'DATASTORE_DATASET' - - -def _get_production_dataset_id(): - """Gets the production application ID if it can be inferred.""" - return os.getenv(_DATASET_ENV_VAR_NAME) - - -def _get_gcd_dataset_id(): - """Gets the GCD application ID if it can be inferred.""" - return os.getenv(_GCD_DATASET_ENV_VAR_NAME) - - -def _determine_default_dataset_id(dataset_id=None): - """Determine default dataset ID explicitly or implicitly as fall-back. - - In implicit case, supports four environments. In order of precedence, the - implicit environments are: - - * GCLOUD_DATASET_ID environment variable - * DATASTORE_DATASET environment variable (for ``gcd`` testing) - * Google App Engine application ID - * Google Compute Engine project ID (from metadata server) - - :type dataset_id: string - :param dataset_id: Optional. The dataset ID to use as default. - - :rtype: string or ``NoneType`` - :returns: Default dataset ID if it can be determined. - """ - if dataset_id is None: - dataset_id = _get_production_dataset_id() - - if dataset_id is None: - dataset_id = _get_gcd_dataset_id() - - if dataset_id is None: - dataset_id = _app_engine_id() - - if dataset_id is None: - dataset_id = _compute_engine_id() - - return dataset_id - - -def set_default_dataset_id(dataset_id=None): - """Set default dataset ID either explicitly or implicitly as fall-back. - - In implicit case, supports four environments. In order of precedence, the - implicit environments are: - - * GCLOUD_DATASET_ID environment variable - * DATASTORE_DATASET environment variable (for ``gcd`` testing) - * Google App Engine application ID - * Google Compute Engine project ID (from metadata server) - - :type dataset_id: string - :param dataset_id: Optional. The dataset ID to use as default. - - :raises: :class:`EnvironmentError` if no dataset ID was implied. - """ - dataset_id = _determine_default_dataset_id(dataset_id=dataset_id) - if dataset_id is not None: - _DEFAULTS.dataset_id = dataset_id - else: - raise EnvironmentError('No dataset ID could be inferred.') - - -def get_default_dataset_id(): - """Get default dataset ID. - - :rtype: string or ``NoneType`` - :returns: The default dataset ID if one has been set. - """ - return _DEFAULTS.dataset_id - - -def get_connection(): - """Shortcut method to establish a connection to the Cloud Datastore. - - Use this if you are going to access several datasets - with the same set of credentials (unlikely): - - >>> from gcloud import datastore - - >>> connection = datastore.get_connection() - >>> key1 = datastore.Key('Kind', 1234, dataset_id='dataset1') - >>> key2 = datastore.Key('Kind', 1234, dataset_id='dataset2') - >>> entity1 = datastore.get(key1, connection=connection) - >>> entity2 = datastore.get(key2, connection=connection) - - :rtype: :class:`gcloud.datastore.connection.Connection` - :returns: A connection defined with the proper credentials. - """ - return Connection.from_environment() - - -def set_default_connection(connection=None): - """Set default connection either explicitly or implicitly as fall-back. - - :type connection: :class:`gcloud.datastore.connection.Connection` - :param connection: A connection provided to be the default. 
- """ - connection = connection or get_connection() - _DEFAULTS.connection = connection - - -def get_default_connection(): - """Get default connection. - - :rtype: :class:`gcloud.datastore.connection.Connection` or ``NoneType`` - :returns: The default connection if one has been set. - """ - return _DEFAULTS.connection - - -class _DefaultsContainer(object): - """Container for defaults. - - :type connection: :class:`gcloud.datastore.connection.Connection` - :param connection: Persistent implied connection from environment. - - :type dataset_id: string - :param dataset_id: Persistent implied dataset ID from environment. - - :type implicit: boolean - :param implicit: Boolean indicating if the container should allow - implicit properties. - """ - - @_lazy_property_deco - @staticmethod - def dataset_id(): - """Return the implicit default dataset ID.""" - return _determine_default_dataset_id() - - @_lazy_property_deco - @staticmethod - def connection(): - """Return the implicit default connection..""" - return get_connection() - - def __init__(self, connection=None, dataset_id=None, implicit=False): - if connection is not None or not implicit: - self.connection = connection - if dataset_id is not None or not implicit: - self.dataset_id = dataset_id - - -_DEFAULTS = _DefaultsContainer(implicit=True) diff --git a/gcloud/datastore/_testing.py b/gcloud/datastore/_testing.py deleted file mode 100644 index 97e43222c32e..000000000000 --- a/gcloud/datastore/_testing.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Shared datastore testing utilities.""" - -from gcloud._testing import _Monkey -from gcloud.datastore import _implicit_environ -from gcloud.datastore._implicit_environ import _DefaultsContainer - - -def _monkey_defaults(*args, **kwargs): - mock_defaults = _DefaultsContainer(*args, **kwargs) - return _Monkey(_implicit_environ, _DEFAULTS=mock_defaults) - - -def _setup_defaults(test_case, *args, **kwargs): - test_case._replaced_defaults = _implicit_environ._DEFAULTS - _implicit_environ._DEFAULTS = _DefaultsContainer(*args, **kwargs) - - -def _tear_down_defaults(test_case): - _implicit_environ._DEFAULTS = test_case._replaced_defaults diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py index cb7c5a043ab5..ae07af1d2746 100644 --- a/gcloud/datastore/client.py +++ b/gcloud/datastore/client.py @@ -13,20 +13,71 @@ # limitations under the License. 
"""Convenience wrapper for invoking APIs/factories w/ a dataset ID.""" +import os + from gcloud._helpers import _LocalStack +from gcloud._helpers import _app_engine_id +from gcloud._helpers import _compute_engine_id from gcloud.datastore import helpers +from gcloud.datastore.connection import Connection from gcloud.datastore.batch import Batch from gcloud.datastore.entity import Entity from gcloud.datastore.key import Key from gcloud.datastore.query import Query from gcloud.datastore.transaction import Transaction -from gcloud.datastore._implicit_environ import _determine_default_dataset_id -from gcloud.datastore._implicit_environ import get_connection _MAX_LOOPS = 128 """Maximum number of iterations to wait for deferred keys.""" +_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID' +"""Environment variable defining default dataset ID.""" + +_GCD_DATASET_ENV_VAR_NAME = 'DATASTORE_DATASET' +"""Environment variable defining default dataset ID under GCD.""" + + +def _get_production_dataset_id(): + """Gets the production application ID if it can be inferred.""" + return os.getenv(_DATASET_ENV_VAR_NAME) + + +def _get_gcd_dataset_id(): + """Gets the GCD application ID if it can be inferred.""" + return os.getenv(_GCD_DATASET_ENV_VAR_NAME) + + +def _determine_default_dataset_id(dataset_id=None): + """Determine default dataset ID explicitly or implicitly as fall-back. + + In implicit case, supports four environments. In order of precedence, the + implicit environments are: + + * GCLOUD_DATASET_ID environment variable + * DATASTORE_DATASET environment variable (for ``gcd`` testing) + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + + :type dataset_id: string + :param dataset_id: Optional. The dataset ID to use as default. + + :rtype: string or ``NoneType`` + :returns: Default dataset ID if it can be determined. + """ + if dataset_id is None: + dataset_id = _get_production_dataset_id() + + if dataset_id is None: + dataset_id = _get_gcd_dataset_id() + + if dataset_id is None: + dataset_id = _app_engine_id() + + if dataset_id is None: + dataset_id = _compute_engine_id() + + return dataset_id + def _extended_lookup(connection, dataset_id, key_pbs, missing=None, deferred=None, @@ -126,7 +177,7 @@ def __init__(self, dataset_id=None, namespace=None, connection=None): raise EnvironmentError('Dataset ID could not be inferred.') self.dataset_id = dataset_id if connection is None: - connection = get_connection() + connection = Connection.from_environment() self.connection = connection self._batch_stack = _LocalStack() self.namespace = namespace diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index d1064bb9017f..14d61cfff9d3 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -17,7 +17,6 @@ import copy import six -from gcloud.datastore import _implicit_environ from gcloud.datastore import _datastore_v1_pb2 as datastore_pb @@ -395,15 +394,11 @@ def _validate_dataset_id(dataset_id, parent): :rtype: string :returns: The ``dataset_id`` passed in, or implied from the environment. :raises: :class:`ValueError` if ``dataset_id`` is ``None`` and no dataset - can be inferred. + can be inferred from the parent. 
""" if parent is None: - if dataset_id is None: - - dataset_id = _implicit_environ.get_default_dataset_id() - if dataset_id is None: - raise ValueError("A Key must have a dataset ID set.") + raise ValueError("A Key must have a dataset ID set.") return dataset_id diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 4adfece8520d..1ae2c43dfa24 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -102,7 +102,7 @@ def dataset_id(self): :rtype: str """ - return self._dataset_id + return self._dataset_id or self._client.dataset_id @property def namespace(self): @@ -111,7 +111,7 @@ def namespace(self): :rtype: string or None :returns: the namespace assigned to this query """ - return self._namespace + return self._namespace or self._client.namespace @namespace.setter def namespace(self, value): diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py deleted file mode 100644 index dd22d2787ad0..000000000000 --- a/gcloud/datastore/test___init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest2 - - -class Test_set_defaults(unittest2.TestCase): - - def _callFUT(self, dataset_id=None, connection=None): - from gcloud.datastore import set_defaults - return set_defaults(dataset_id=dataset_id, connection=connection) - - def test_it(self): - from gcloud._testing import _Monkey - from gcloud import datastore - - DATASET_ID = object() - CONNECTION = object() - - SET_DATASET_CALLED = [] - - def call_set_dataset(dataset_id=None): - SET_DATASET_CALLED.append(dataset_id) - - SET_CONNECTION_CALLED = [] - - def call_set_connection(connection=None): - SET_CONNECTION_CALLED.append(connection) - - with _Monkey(datastore, set_default_dataset_id=call_set_dataset, - set_default_connection=call_set_connection): - self._callFUT(dataset_id=DATASET_ID, connection=CONNECTION) - - self.assertEqual(SET_DATASET_CALLED, [DATASET_ID]) - self.assertEqual(SET_CONNECTION_CALLED, [CONNECTION]) diff --git a/gcloud/datastore/test__implicit_environ.py b/gcloud/datastore/test__implicit_environ.py deleted file mode 100644 index 63f2c8c270f7..000000000000 --- a/gcloud/datastore/test__implicit_environ.py +++ /dev/null @@ -1,354 +0,0 @@ -# Copyright 2014 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest2 - - -class Test_get_default_connection(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self): - from gcloud.datastore._implicit_environ import get_default_connection - return get_default_connection() - - def test_default(self): - self.assertEqual(self._callFUT(), None) - - def test_preset(self): - from gcloud.datastore._testing import _monkey_defaults - - SENTINEL = object() - with _monkey_defaults(connection=SENTINEL): - self.assertEqual(self._callFUT(), SENTINEL) - - -class Test_get_default_dataset_id(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self): - from gcloud.datastore._implicit_environ import get_default_dataset_id - return get_default_dataset_id() - - def test_default(self): - self.assertEqual(self._callFUT(), None) - - def test_preset(self): - from gcloud.datastore._testing import _monkey_defaults - - SENTINEL = object() - with _monkey_defaults(dataset_id=SENTINEL): - self.assertEqual(self._callFUT(), SENTINEL) - - -class Test__get_production_dataset_id(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore import _implicit_environ - return _implicit_environ._get_production_dataset_id() - - def test_no_value(self): - import os - from gcloud._testing import _Monkey - - environ = {} - with _Monkey(os, getenv=environ.get): - dataset_id = self._callFUT() - self.assertEqual(dataset_id, None) - - def test_value_set(self): - import os - from gcloud._testing import _Monkey - from gcloud.datastore._implicit_environ import _DATASET_ENV_VAR_NAME - - MOCK_DATASET_ID = object() - environ = {_DATASET_ENV_VAR_NAME: MOCK_DATASET_ID} - with _Monkey(os, getenv=environ.get): - dataset_id = self._callFUT() - self.assertEqual(dataset_id, MOCK_DATASET_ID) - - -class Test__get_gcd_dataset_id(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore import _implicit_environ - return _implicit_environ._get_gcd_dataset_id() - - def test_no_value(self): - import os - from gcloud._testing import _Monkey - - environ = {} - with _Monkey(os, getenv=environ.get): - dataset_id = self._callFUT() - self.assertEqual(dataset_id, None) - - def test_value_set(self): - import os - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - MOCK_DATASET_ID = object() - environ = { - _implicit_environ._GCD_DATASET_ENV_VAR_NAME: MOCK_DATASET_ID, - } - with _Monkey(os, getenv=environ.get): - dataset_id = self._callFUT() - self.assertEqual(dataset_id, MOCK_DATASET_ID) - - -class Test__determine_default_dataset_id(unittest2.TestCase): - - def _callFUT(self, dataset_id=None): - from gcloud.datastore import _implicit_environ - return _implicit_environ._determine_default_dataset_id( - dataset_id=dataset_id) - - def _determine_default_helper(self, prod=None, gcd=None, gae=None, - gce=None, dataset_id=None): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - _callers = [] - - def prod_mock(): - _callers.append('prod_mock') - return prod - - def gcd_mock(): - _callers.append('gcd_mock') - return gcd - - def gae_mock(): - _callers.append('gae_mock') - return gae - - def gce_mock(): - _callers.append('gce_mock') - 
return gce - - patched_methods = { - '_get_production_dataset_id': prod_mock, - '_get_gcd_dataset_id': gcd_mock, - '_app_engine_id': gae_mock, - '_compute_engine_id': gce_mock, - } - - with _Monkey(_implicit_environ, **patched_methods): - returned_dataset_id = self._callFUT(dataset_id) - - return returned_dataset_id, _callers - - def test_no_value(self): - dataset_id, callers = self._determine_default_helper() - self.assertEqual(dataset_id, None) - self.assertEqual(callers, - ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock']) - - def test_explicit(self): - DATASET_ID = object() - dataset_id, callers = self._determine_default_helper( - dataset_id=DATASET_ID) - self.assertEqual(dataset_id, DATASET_ID) - self.assertEqual(callers, []) - - def test_prod(self): - DATASET_ID = object() - dataset_id, callers = self._determine_default_helper(prod=DATASET_ID) - self.assertEqual(dataset_id, DATASET_ID) - self.assertEqual(callers, ['prod_mock']) - - def test_gcd(self): - DATASET_ID = object() - dataset_id, callers = self._determine_default_helper(gcd=DATASET_ID) - self.assertEqual(dataset_id, DATASET_ID) - self.assertEqual(callers, ['prod_mock', 'gcd_mock']) - - def test_gae(self): - DATASET_ID = object() - dataset_id, callers = self._determine_default_helper(gae=DATASET_ID) - self.assertEqual(dataset_id, DATASET_ID) - self.assertEqual(callers, ['prod_mock', 'gcd_mock', 'gae_mock']) - - def test_gce(self): - DATASET_ID = object() - dataset_id, callers = self._determine_default_helper(gce=DATASET_ID) - self.assertEqual(dataset_id, DATASET_ID) - self.assertEqual(callers, - ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock']) - - -class Test_set_default_dataset_id(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self, dataset_id=None): - from gcloud.datastore._implicit_environ import set_default_dataset_id - return set_default_dataset_id(dataset_id=dataset_id) - - def test_raises(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - _called_dataset_id = [] - - def mock_determine(dataset_id): - _called_dataset_id.append(dataset_id) - return None - - with _Monkey(_implicit_environ, - _determine_default_dataset_id=mock_determine): - self.assertRaises(EnvironmentError, self._callFUT) - - self.assertEqual(_called_dataset_id, [None]) - - def test_set_correctly(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - self.assertEqual(_implicit_environ._DEFAULTS.dataset_id, None) - - DATASET_ID = object() - _called_dataset_id = [] - - def mock_determine(dataset_id): - _called_dataset_id.append(dataset_id) - return DATASET_ID - - with _Monkey(_implicit_environ, - _determine_default_dataset_id=mock_determine): - self._callFUT() - - self.assertEqual(_implicit_environ._DEFAULTS.dataset_id, DATASET_ID) - self.assertEqual(_called_dataset_id, [None]) - - -class Test_lazy_loading(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self, implicit=True) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - - def test_descriptor_for_dataset_id(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - self.assertFalse( - 'dataset_id' in _implicit_environ._DEFAULTS.__dict__) - - 
DEFAULT = object() - - with _Monkey(_implicit_environ, - _determine_default_dataset_id=lambda: DEFAULT): - lazy_loaded = _implicit_environ._DEFAULTS.dataset_id - - self.assertEqual(lazy_loaded, DEFAULT) - self.assertTrue( - 'dataset_id' in _implicit_environ._DEFAULTS.__dict__) - - def test_descriptor_for_connection(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - self.assertFalse( - 'connection' in _implicit_environ._DEFAULTS.__dict__) - - DEFAULT = object() - - with _Monkey(_implicit_environ, get_connection=lambda: DEFAULT): - lazy_loaded = _implicit_environ._DEFAULTS.connection - - self.assertEqual(lazy_loaded, DEFAULT) - self.assertTrue( - 'connection' in _implicit_environ._DEFAULTS.__dict__) - - -class Test_get_connection(unittest2.TestCase): - - def _callFUT(self): - from gcloud.datastore._implicit_environ import get_connection - return get_connection() - - def test_it(self): - from gcloud import credentials - from gcloud.datastore.connection import SCOPE - from gcloud.datastore.connection import Connection - from gcloud.test_credentials import _Client - from gcloud._testing import _Monkey - - client = _Client() - with _Monkey(credentials, client=client): - found = self._callFUT() - self.assertTrue(isinstance(found, Connection)) - self.assertTrue(found._credentials is client._signed) - self.assertEqual(found._credentials._scopes, SCOPE) - self.assertTrue(client._get_app_default_called) - - -class Test_set_default_connection(unittest2.TestCase): - - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - - def _callFUT(self, connection=None): - from gcloud.datastore._implicit_environ import set_default_connection - return set_default_connection(connection=connection) - - def test_set_explicit(self): - from gcloud.datastore import _implicit_environ - - self.assertEqual(_implicit_environ.get_default_connection(), None) - fake_cnxn = object() - self._callFUT(connection=fake_cnxn) - self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn) - - def test_set_implicit(self): - from gcloud._testing import _Monkey - from gcloud.datastore import _implicit_environ - - self.assertEqual(_implicit_environ.get_default_connection(), None) - - fake_cnxn = object() - with _Monkey(_implicit_environ, get_connection=lambda: fake_cnxn): - self._callFUT() - - self.assertEqual(_implicit_environ.get_default_connection(), fake_cnxn) diff --git a/gcloud/datastore/test_client.py b/gcloud/datastore/test_client.py index 611dad5f8845..81ce96f04faa 100644 --- a/gcloud/datastore/test_client.py +++ b/gcloud/datastore/test_client.py @@ -31,6 +31,140 @@ def _make_entity_pb(dataset_id, kind, integer_id, name=None, str_val=None): return entity_pb +class Test__get_production_dataset_id(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore.client import _get_production_dataset_id + return _get_production_dataset_id() + + def test_no_value(self): + import os + from gcloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + dataset_id = self._callFUT() + self.assertEqual(dataset_id, None) + + def test_value_set(self): + import os + from gcloud._testing import _Monkey + from gcloud.datastore.client import _DATASET_ENV_VAR_NAME + + MOCK_DATASET_ID = object() + environ = {_DATASET_ENV_VAR_NAME: MOCK_DATASET_ID} + with _Monkey(os, getenv=environ.get): + dataset_id = 
self._callFUT() + self.assertEqual(dataset_id, MOCK_DATASET_ID) + + +class Test__get_gcd_dataset_id(unittest2.TestCase): + + def _callFUT(self): + from gcloud.datastore.client import _get_gcd_dataset_id + return _get_gcd_dataset_id() + + def test_no_value(self): + import os + from gcloud._testing import _Monkey + + environ = {} + with _Monkey(os, getenv=environ.get): + dataset_id = self._callFUT() + self.assertEqual(dataset_id, None) + + def test_value_set(self): + import os + from gcloud._testing import _Monkey + from gcloud.datastore.client import _GCD_DATASET_ENV_VAR_NAME + + MOCK_DATASET_ID = object() + environ = {_GCD_DATASET_ENV_VAR_NAME: MOCK_DATASET_ID} + with _Monkey(os, getenv=environ.get): + dataset_id = self._callFUT() + self.assertEqual(dataset_id, MOCK_DATASET_ID) + + +class Test__determine_default_dataset_id(unittest2.TestCase): + + def _callFUT(self, dataset_id=None): + from gcloud.datastore.client import _determine_default_dataset_id + return _determine_default_dataset_id(dataset_id=dataset_id) + + def _determine_default_helper(self, prod=None, gcd=None, gae=None, + gce=None, dataset_id=None): + from gcloud._testing import _Monkey + from gcloud.datastore import client + + _callers = [] + + def prod_mock(): + _callers.append('prod_mock') + return prod + + def gcd_mock(): + _callers.append('gcd_mock') + return gcd + + def gae_mock(): + _callers.append('gae_mock') + return gae + + def gce_mock(): + _callers.append('gce_mock') + return gce + + patched_methods = { + '_get_production_dataset_id': prod_mock, + '_get_gcd_dataset_id': gcd_mock, + '_app_engine_id': gae_mock, + '_compute_engine_id': gce_mock, + } + + with _Monkey(client, **patched_methods): + returned_dataset_id = self._callFUT(dataset_id) + + return returned_dataset_id, _callers + + def test_no_value(self): + dataset_id, callers = self._determine_default_helper() + self.assertEqual(dataset_id, None) + self.assertEqual(callers, + ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock']) + + def test_explicit(self): + DATASET_ID = object() + dataset_id, callers = self._determine_default_helper( + dataset_id=DATASET_ID) + self.assertEqual(dataset_id, DATASET_ID) + self.assertEqual(callers, []) + + def test_prod(self): + DATASET_ID = object() + dataset_id, callers = self._determine_default_helper(prod=DATASET_ID) + self.assertEqual(dataset_id, DATASET_ID) + self.assertEqual(callers, ['prod_mock']) + + def test_gcd(self): + DATASET_ID = object() + dataset_id, callers = self._determine_default_helper(gcd=DATASET_ID) + self.assertEqual(dataset_id, DATASET_ID) + self.assertEqual(callers, ['prod_mock', 'gcd_mock']) + + def test_gae(self): + DATASET_ID = object() + dataset_id, callers = self._determine_default_helper(gae=DATASET_ID) + self.assertEqual(dataset_id, DATASET_ID) + self.assertEqual(callers, ['prod_mock', 'gcd_mock', 'gae_mock']) + + def test_gce(self): + DATASET_ID = object() + dataset_id, callers = self._determine_default_helper(gce=DATASET_ID) + self.assertEqual(dataset_id, DATASET_ID) + self.assertEqual(callers, + ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock']) + + class TestClient(unittest2.TestCase): DATASET_ID = 'DATASET' @@ -55,10 +189,16 @@ def test_ctor_w_implicit_inputs(self): OTHER = 'other' conn = object() + + class _Connection(object): + @classmethod + def from_environment(cls): + return conn + klass = self._getTargetClass() with _Monkey(_MUT, _determine_default_dataset_id=lambda x: x or OTHER, - get_connection=lambda: conn): + Connection=_Connection): client = klass() self.assertEqual(client.dataset_id, 
OTHER) self.assertEqual(client.namespace, None) diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index 5748dfe3b9b3..122b916ea58b 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -21,14 +21,6 @@ class TestEntity(unittest2.TestCase): - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - def _getTargetClass(self): from gcloud.datastore.entity import Entity return Entity diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py index 29914c919eb0..e3487d77830d 100644 --- a/gcloud/datastore/test_helpers.py +++ b/gcloud/datastore/test_helpers.py @@ -17,14 +17,6 @@ class Test_entity_from_protobuf(unittest2.TestCase): - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - def _callFUT(self, val): from gcloud.datastore.helpers import entity_from_protobuf return entity_from_protobuf(val) @@ -148,14 +140,6 @@ def test_nested_entity_no_key(self): class Test_key_from_protobuf(unittest2.TestCase): - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - def _callFUT(self, val): from gcloud.datastore.helpers import key_from_protobuf @@ -578,14 +562,6 @@ def test_prepare_dataset_id_unset(self): class Test_find_true_dataset_id(unittest2.TestCase): - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - def _callFUT(self, dataset_id, connection): from gcloud.datastore.helpers import find_true_dataset_id return find_true_dataset_id(dataset_id, connection) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index eb68f59b9126..0220b22e7787 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -19,14 +19,6 @@ class TestKey(unittest2.TestCase): _DEFAULT_DATASET = 'DATASET' - def setUp(self): - from gcloud.datastore._testing import _setup_defaults - _setup_defaults(self) - - def tearDown(self): - from gcloud.datastore._testing import _tear_down_defaults - _tear_down_defaults(self) - def _getTargetClass(self): from gcloud.datastore.key import Key return Key @@ -34,29 +26,14 @@ def _getTargetClass(self): def _makeOne(self, *args, **kwargs): return self._getTargetClass()(*args, **kwargs) - def _monkeyDatasetID(self, dataset_id=_DEFAULT_DATASET): - from gcloud.datastore._testing import _monkey_defaults - return _monkey_defaults(dataset_id=dataset_id) - def test_ctor_empty(self): self.assertRaises(ValueError, self._makeOne) def test_ctor_no_dataset_id(self): klass = self._getTargetClass() - with self._monkeyDatasetID(None): - self.assertRaises(ValueError, klass, 'KIND') - - def test_ctor_w_implicit_dataset_id(self): - _DATASET = 'DATASET' - _KIND = 'KIND' - with self._monkeyDatasetID(_DATASET): - key = self._makeOne(_KIND) - self.assertEqual(key.dataset_id, _DATASET) - self.assertEqual(key.namespace, None) - self.assertEqual(key.kind, _KIND) - self.assertEqual(key.path, [{'kind': _KIND}]) + self.assertRaises(ValueError, klass, 'KIND') - def 
test_ctor_w_implicit_dataset_id_empty_path(self): + def test_ctor_w_explicit_dataset_id_empty_path(self): _DATASET = 'DATASET' self.assertRaises(ValueError, self._makeOne, dataset_id=_DATASET) diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py index 7d0f822a2e3a..8f6ef628422c 100644 --- a/gcloud/datastore/test_transaction.py +++ b/gcloud/datastore/test_transaction.py @@ -220,5 +220,4 @@ def _pop_batch(self): @property def current_batch(self): - if self._batches: - return self._batches[0] + return self._batches and self._batches[0] or None diff --git a/system_tests/clear_datastore.py b/system_tests/clear_datastore.py index 32f4e0873031..61678c9c3979 100644 --- a/system_tests/clear_datastore.py +++ b/system_tests/clear_datastore.py @@ -17,10 +17,10 @@ from six.moves import input from gcloud import datastore -from gcloud.datastore import _implicit_environ +from gcloud.datastore import client -_implicit_environ._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' +client._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' CLIENT = datastore.Client() diff --git a/system_tests/datastore.py b/system_tests/datastore.py index 278640c5feb0..98fca6baa1cb 100644 --- a/system_tests/datastore.py +++ b/system_tests/datastore.py @@ -17,14 +17,14 @@ import unittest2 from gcloud import datastore -from gcloud.datastore import _implicit_environ +from gcloud.datastore import client # This assumes the command is being run via tox hence the # repository root is the current directory. from system_tests import populate_datastore -_implicit_environ._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' -client = datastore.Client() +client._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' +CLIENT = datastore.Client() class TestDatastore(unittest2.TestCase): @@ -33,16 +33,16 @@ def setUp(self): self.case_entities_to_delete = [] def tearDown(self): - with client.transaction(): + with CLIENT.transaction(): keys = [entity.key for entity in self.case_entities_to_delete] - client.delete_multi(keys) + CLIENT.delete_multi(keys) class TestDatastoreAllocateIDs(TestDatastore): def test_allocate_ids(self): num_ids = 10 - allocated_keys = client.allocate_ids(client.key('Kind'), num_ids) + allocated_keys = CLIENT.allocate_ids(CLIENT.key('Kind'), num_ids) self.assertEqual(len(allocated_keys), num_ids) unique_ids = set() @@ -56,7 +56,7 @@ def test_allocate_ids(self): class TestDatastoreSave(TestDatastore): - PARENT = datastore.Key('Blog', 'PizzaMan') + PARENT = CLIENT.key('Blog', 'PizzaMan') def _get_post(self, id_or_name=None, post_content=None): post_content = post_content or { @@ -71,7 +71,7 @@ def _get_post(self, id_or_name=None, post_content=None): # Create an entity with the given content. # NOTE: Using a parent to ensure consistency for query # in `test_empty_kind`. - key = datastore.Key('Post', parent=self.PARENT) + key = CLIENT.key('Post', parent=self.PARENT) entity = datastore.Entity(key=key) entity.update(post_content) @@ -83,7 +83,7 @@ def _get_post(self, id_or_name=None, post_content=None): def _generic_test_post(self, name=None, key_id=None): entity = self._get_post(id_or_name=(name or key_id)) - client.put(entity) + CLIENT.put(entity) # Register entity to be deleted. 
self.case_entities_to_delete.append(entity) @@ -92,7 +92,7 @@ def _generic_test_post(self, name=None, key_id=None): self.assertEqual(entity.key.name, name) if key_id is not None: self.assertEqual(entity.key.id, key_id) - retrieved_entity = client.get(entity.key) + retrieved_entity = CLIENT.get(entity.key) # Check the given and retrieved are the the same. self.assertEqual(retrieved_entity, entity) @@ -106,7 +106,7 @@ def test_post_with_generated_id(self): self._generic_test_post() def test_save_multiple(self): - with client.transaction() as xact: + with CLIENT.transaction() as xact: entity1 = self._get_post() xact.put(entity1) # Register entity to be deleted. @@ -127,11 +127,11 @@ def test_save_multiple(self): self.case_entities_to_delete.append(entity2) keys = [entity1.key, entity2.key] - matches = client.get_multi(keys) + matches = CLIENT.get_multi(keys) self.assertEqual(len(matches), 2) def test_empty_kind(self): - query = client.query(kind='Post') + query = CLIENT.query(kind='Post') query.ancestor = self.PARENT posts = list(query.fetch(limit=2)) self.assertEqual(posts, []) @@ -140,16 +140,16 @@ def test_empty_kind(self): class TestDatastoreSaveKeys(TestDatastore): def test_save_key_self_reference(self): - parent_key = datastore.Key('Residence', 'NewYork') - key = datastore.Key('Person', 'name', parent=parent_key) + parent_key = CLIENT.key('Residence', 'NewYork') + key = CLIENT.key('Person', 'name', parent=parent_key) entity = datastore.Entity(key=key) entity['fullName'] = u'Full name' entity['linkedTo'] = key # Self reference. - client.put(entity) + CLIENT.put(entity) self.case_entities_to_delete.append(entity) - query = client.query(kind='Person') + query = CLIENT.query(kind='Person') # Adding ancestor to ensure consistency. query.ancestor = parent_key query.add_filter('linkedTo', '=', key) @@ -164,10 +164,10 @@ class TestDatastoreQuery(TestDatastore): def setUpClass(cls): super(TestDatastoreQuery, cls).setUpClass() cls.CHARACTERS = populate_datastore.CHARACTERS - cls.ANCESTOR_KEY = datastore.Key(*populate_datastore.ANCESTOR) + cls.ANCESTOR_KEY = CLIENT.key(*populate_datastore.ANCESTOR) def _base_query(self): - return client.query(kind='Character', ancestor=self.ANCESTOR_KEY) + return CLIENT.query(kind='Character', ancestor=self.ANCESTOR_KEY) def test_limit_queries(self): limit = 5 @@ -212,7 +212,7 @@ def test_ancestor_query(self): self.assertEqual(len(entities), expected_matches) def test_query___key___filter(self): - rickard_key = datastore.Key(*populate_datastore.RICKARD) + rickard_key = CLIENT.key(*populate_datastore.RICKARD) query = self._base_query() query.add_filter('__key__', '=', rickard_key) @@ -327,16 +327,16 @@ def test_query_group_by(self): class TestDatastoreTransaction(TestDatastore): def test_transaction(self): - entity = datastore.Entity(key=datastore.Key('Company', 'Google')) + entity = datastore.Entity(key=CLIENT.key('Company', 'Google')) entity['url'] = u'www.google.com' - with client.transaction() as xact: - result = client.get(entity.key) + with CLIENT.transaction() as xact: + result = CLIENT.get(entity.key) if result is None: xact.put(entity) self.case_entities_to_delete.append(entity) # This will always return after the transaction. 
- retrieved_entity = client.get(entity.key) + retrieved_entity = CLIENT.get(entity.key) self.case_entities_to_delete.append(retrieved_entity) self.assertEqual(retrieved_entity, entity) diff --git a/system_tests/populate_datastore.py b/system_tests/populate_datastore.py index 5c4dc8895acf..0c2417b9309e 100644 --- a/system_tests/populate_datastore.py +++ b/system_tests/populate_datastore.py @@ -17,10 +17,10 @@ from six.moves import zip from gcloud import datastore -from gcloud.datastore import _implicit_environ +from gcloud.datastore import client -_implicit_environ._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' +client._DATASET_ENV_VAR_NAME = 'GCLOUD_TESTS_DATASET_ID' CLIENT = datastore.Client()
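
The net effect of this patch is that the implicit-environment plumbing (`set_defaults`, `get_connection`, `get_default_dataset_id`, and friends) is removed in favor of an explicit `datastore.Client` that owns the dataset ID, namespace, and connection. Below is a rough sketch of the calling pattern the new module docstring and system tests imply; the dataset ID and entity values are placeholders, not anything taken from the patch itself.

    # Minimal sketch of the client-centric API this diff moves to.
    from gcloud import datastore

    # If dataset_id is omitted, the Client falls back to the same inference
    # chain the deleted _implicit_environ module used: the GCLOUD_DATASET_ID
    # and DATASTORE_DATASET environment variables, then the App Engine app ID,
    # then the Compute Engine project ID (see _determine_default_dataset_id,
    # now housed in gcloud/datastore/client.py).
    client = datastore.Client(dataset_id='my-dataset-id')

    # Keys, queries, batches, and transactions are built through the client
    # rather than through module-level factories that read a process-wide
    # default.
    key = client.key('EntityKind', 1234)
    entity = datastore.Entity(key)
    entity['answer'] = 42
    client.put(entity)

    query = client.query(kind='EntityKind')
    results = list(query.fetch(limit=10))

Note that `Key` itself no longer infers a dataset ID: per the `_validate_dataset_id` change above, building a key with neither a `dataset_id` nor a parent now raises `ValueError`, while `client.key(...)` supplies the client's dataset ID for you. Likewise, `Query.dataset_id` and `Query.namespace` now fall back to the owning client's values instead of module-level defaults.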