From e998859d8dba24a97be97ead6edc759c77a22375 Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Wed, 7 Jan 2015 11:04:04 -0800
Subject: [PATCH] Removing datastore.Dataset class and its uses.

Addresses part 8 of #477.
---
 docs/datastore-dataset.rst            |  7 ---
 docs/index.rst                        |  1 -
 gcloud/datastore/__init__.py          | 60 +++++----------
 gcloud/datastore/_implicit_environ.py |  5 +-
 gcloud/datastore/dataset.py           | 71 ------------------
 gcloud/datastore/key.py               |  5 +-
 gcloud/datastore/test___init__.py     | 78 ++++++------------
 gcloud/datastore/test_dataset.py      | 41 --------
 gcloud/datastore/test_entity.py       | 14 +----
 gcloud/datastore/test_helpers.py      | 12 ++---
 gcloud/datastore/test_key.py          | 13 ++---
 gcloud/datastore/test_transaction.py  |  2 +-
 regression/datastore.py               |  2 +-
 regression/regression_utils.py        | 11 ----
 run_pylint.py                         |  9 ++--
 15 files changed, 53 insertions(+), 278 deletions(-)
 delete mode 100644 docs/datastore-dataset.rst
 delete mode 100644 gcloud/datastore/dataset.py
 delete mode 100644 gcloud/datastore/test_dataset.py

diff --git a/docs/datastore-dataset.rst b/docs/datastore-dataset.rst
deleted file mode 100644
index 4bfd946964cb..000000000000
--- a/docs/datastore-dataset.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Datasets
-~~~~~~~~
-
-.. automodule:: gcloud.datastore.dataset
-  :members:
-  :undoc-members:
-  :show-inheritance:
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index 28865c028fc6..e1d0d7c8628a 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -3,7 +3,6 @@
   :hidden:
 
   datastore-api
-  datastore-dataset
   datastore-entities
   datastore-keys
   datastore-transactions
diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py
index 5033aec06b63..fca5f10e4270 100644
--- a/gcloud/datastore/__init__.py
+++ b/gcloud/datastore/__init__.py
@@ -29,10 +29,6 @@
   which represents a connection between your machine and the Cloud
   Datastore API.
 
-- :class:`gcloud.datastore.dataset.Dataset`
-  which represents a particular dataset
-  (akin to a database name in relational database world).
-
 - :class:`gcloud.datastore.entity.Entity`
   which represents a single entity in the datastore
   (akin to a row in relational database world).
@@ -50,7 +46,6 @@
 from gcloud import credentials
 from gcloud.datastore import _implicit_environ
 from gcloud.datastore.connection import Connection
-from gcloud.datastore.dataset import Dataset
 from gcloud.datastore import helpers
 
 
@@ -61,7 +56,7 @@
 _DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'
 
 
-def set_default_dataset(dataset_id=None):
+def set_default_dataset_id(dataset_id=None):
     """Set default dataset ID either explicitly or implicitly as fall-back.
 
     In implicit case, currently only supports enviroment variable but will
@@ -71,15 +66,13 @@ def set_default_dataset(dataset_id=None):
     - GCLOUD_DATASET_ID
 
     :type dataset_id: :class:`str`.
-    :param dataset_id: Optional. The dataset ID to use for the default
-                       dataset.
+    :param dataset_id: Optional. The dataset ID to use as default.
     """
     if dataset_id is None:
         dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)
 
     if dataset_id is not None:
         _implicit_environ.DATASET_ID = dataset_id
-        _implicit_environ.DATASET = get_dataset(dataset_id)
 
 
 def set_default_connection(connection=None):
@@ -111,43 +104,16 @@ def get_connection():
     return Connection(credentials=scoped_credentials)
 
 
-def get_dataset(dataset_id):
-    """Establish a connection to a particular dataset in the Cloud Datastore.
-
-    This is a shortcut method for creating a connection and using it
-    to connect to a dataset.
-
-    You'll generally use this as the first call to working with the API:
-
-    >>> from gcloud import datastore
-    >>> dataset = datastore.get_dataset('dataset-id')
-    >>> # Now you can do things with the dataset.
-    >>> dataset.query().kind('TestKind').fetch()
-    [...]
-
-    :type dataset_id: string
-    :param dataset_id: The id of the dataset you want to use.
-                       This is akin to a database name
-                       and is usually the same as your Cloud Datastore project
-                       name.
-
-    :rtype: :class:`gcloud.datastore.dataset.Dataset`
-    :returns: A dataset with a connection using the provided credentials.
-    """
-    connection = get_connection()
-    return Dataset(dataset_id, connection=connection)
-
-
-def _require_dataset():
-    """Convenience method to ensure DATASET is set.
+def _require_dataset_id():
+    """Convenience method to ensure DATASET_ID is set.
 
-    :rtype: :class:`gcloud.datastore.dataset.Dataset`
-    :returns: A dataset based on the current environment.
-    :raises: :class:`EnvironmentError` if DATASET is not set.
+    :rtype: :class:`str`
+    :returns: A dataset ID based on the current environment.
+    :raises: :class:`EnvironmentError` if DATASET_ID is not set.
     """
-    if _implicit_environ.DATASET is None:
-        raise EnvironmentError('Dataset could not be inferred.')
-    return _implicit_environ.DATASET
+    if _implicit_environ.DATASET_ID is None:
+        raise EnvironmentError('Dataset ID could not be inferred.')
+    return _implicit_environ.DATASET_ID
 
 
 def _require_connection():
@@ -164,7 +130,7 @@ def _require_connection():
 
 def get_entities(keys, missing=None, deferred=None,
                  connection=None, dataset_id=None):
-    """Retrieves entities from implied dataset, along with their attributes.
+    """Retrieves entities, along with their attributes.
 
     :type keys: list of :class:`gcloud.datastore.key.Key`
     :param keys: The name of the item to retrieve.
@@ -189,7 +155,7 @@ def get_entities(keys, missing=None, deferred=None,
     :returns: The requested entities.
     """
     connection = connection or _require_connection()
-    dataset_id = dataset_id or _require_dataset().id()
+    dataset_id = dataset_id or _require_dataset_id()
 
     entity_pbs = connection.lookup(
         dataset_id=dataset_id,
@@ -234,7 +200,7 @@ def allocate_ids(incomplete_key, num_ids, connection=None, dataset_id=None):
     :raises: `ValueError` if `incomplete_key` is not a partial key.
     """
     connection = connection or _require_connection()
-    dataset_id = dataset_id or _require_dataset().id()
+    dataset_id = dataset_id or _require_dataset_id()
 
     if not incomplete_key.is_partial:
         raise ValueError(('Key is not partial.', incomplete_key))
diff --git a/gcloud/datastore/_implicit_environ.py b/gcloud/datastore/_implicit_environ.py
index 88d19c15682a..8410ab7cdece 100644
--- a/gcloud/datastore/_implicit_environ.py
+++ b/gcloud/datastore/_implicit_environ.py
@@ -1,15 +1,12 @@
 """Module to provide implicit behavior based on enviroment.
 
 Acts as a mutable namespace to allow the datastore package to
-imply the current dataset and connection from the enviroment.
+imply the current dataset ID and connection from the enviroment.
 """
 
 
 DATASET_ID = None
 """Module global to allow persistent implied dataset ID from enviroment."""
 
-DATASET = None
-"""Module global to allow persistent implied dataset from enviroment."""
-
 CONNECTION = None
 """Module global to allow persistent implied connection from enviroment."""
diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py
deleted file mode 100644
index d51df64b11cb..000000000000
--- a/gcloud/datastore/dataset.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Create / interact with gcloud datastore datasets."""
-
-
-class Dataset(object):
-    """A dataset in the Cloud Datastore.
-
-    This class acts as an abstraction of a single dataset in the Cloud
-    Datastore.
-
-    A dataset is analogous to a database in relational database world,
-    and corresponds to a single project using the Cloud Datastore.
-
-    Typically, you would only have one of these per connection however
-    it didn't seem right to collapse the functionality of a connection
-    and a dataset together into a single class.
-
-    Datasets (like :class:`gcloud.datastore.query.Query`) are immutable.
-    That is, you cannot change the ID and connection references. If you
-    need to modify the connection or ID, it's recommended to construct a
-    new :class:`Dataset`.
-
-    :type id: string
-    :param id: The ID of the dataset (your project ID)
-
-    :type connection: :class:`gcloud.datastore.connection.Connection`
-    :param connection: The connection to use for executing API calls.
-    """
-
-    def __init__(self, id, connection=None):
-        self._connection = connection
-        self._id = id
-
-    def connection(self):
-        """Get the current connection.
-
-        >>> dataset = Dataset('dataset-id', connection=conn)
-        >>> dataset.connection()
-
-
-        :rtype: :class:`gcloud.datastore.connection.Connection`
-        :returns: Returns the current connection.
-        """
-
-        return self._connection
-
-    def id(self):
-        """Get the current dataset ID.
-
-        >>> dataset = Dataset('dataset-id', connection=conn)
-        >>> dataset.id()
-        'dataset-id'
-
-        :rtype: string
-        :returns: The current dataset ID.
-        """
-
-        return self._id
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index 2b289d452eba..074fa3246cae 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -83,9 +83,8 @@ def _validate_dataset_id(self):
                  can be implied.
         """
         if self._dataset_id is None:
-            if _implicit_environ.DATASET is not None:
-                # This assumes DATASET.id() is not None.
-                self._dataset_id = _implicit_environ.DATASET.id()
+            if _implicit_environ.DATASET_ID is not None:
+                self._dataset_id = _implicit_environ.DATASET_ID
             else:
                 raise ValueError('A Key must have a dataset ID set.')
 
diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py
index 2146d8fdc2f9..575665f5228c 100644
--- a/gcloud/datastore/test___init__.py
+++ b/gcloud/datastore/test___init__.py
@@ -35,43 +35,36 @@ def test_it(self):
         self.assertTrue(client._get_app_default_called)
 
 
-class Test_set_default_dataset(unittest2.TestCase):
+class Test_set_default_dataset_id(unittest2.TestCase):
 
     def setUp(self):
         from gcloud.datastore import _implicit_environ
-        self._replaced_dataset = _implicit_environ.DATASET
         self._replaced_dataset_id = _implicit_environ.DATASET_ID
-        _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None
+        _implicit_environ.DATASET_ID = None
 
     def tearDown(self):
         from gcloud.datastore import _implicit_environ
-        _implicit_environ.DATASET = self._replaced_dataset
         _implicit_environ.DATASET_ID = self._replaced_dataset_id
 
     def _callFUT(self, dataset_id=None):
-        from gcloud.datastore import set_default_dataset
-        return set_default_dataset(dataset_id=dataset_id)
+        from gcloud.datastore import set_default_dataset_id
+        return set_default_dataset_id(dataset_id=dataset_id)
 
     def _test_with_environ(self, environ, expected_result, dataset_id=None):
         import os
         from gcloud._testing import _Monkey
-        from gcloud import datastore
         from gcloud.datastore import _implicit_environ
 
         # Check the environment is unset.
-        self.assertEqual(_implicit_environ.DATASET, None)
+        self.assertEqual(_implicit_environ.DATASET_ID, None)
 
         def custom_getenv(key):
             return environ.get(key)
 
-        def custom_get_dataset(local_dataset_id):
-            return local_dataset_id
-
         with _Monkey(os, getenv=custom_getenv):
-            with _Monkey(datastore, get_dataset=custom_get_dataset):
-                self._callFUT(dataset_id=dataset_id)
+            self._callFUT(dataset_id=dataset_id)
 
-        self.assertEqual(_implicit_environ.DATASET, expected_result)
+        self.assertEqual(_implicit_environ.DATASET_ID, expected_result)
 
     def test_set_from_env_var(self):
         from gcloud.datastore import _DATASET_ENV_VAR_NAME
@@ -134,49 +127,26 @@ def test_set_implicit(self):
         self.assertEqual(_implicit_environ.CONNECTION, fake_cnxn)
 
 
-class Test_get_dataset(unittest2.TestCase):
-
-    def _callFUT(self, dataset_id):
-        from gcloud.datastore import get_dataset
-        return get_dataset(dataset_id)
-
-    def test_it(self):
-        from gcloud import credentials
-        from gcloud.datastore.connection import Connection
-        from gcloud.datastore.dataset import Dataset
-        from gcloud.test_credentials import _Client
-        from gcloud._testing import _Monkey
-
-        DATASET_ID = 'DATASET'
-        client = _Client()
-        with _Monkey(credentials, client=client):
-            found = self._callFUT(DATASET_ID)
-        self.assertTrue(isinstance(found, Dataset))
-        self.assertTrue(isinstance(found.connection(), Connection))
-        self.assertEqual(found.id(), DATASET_ID)
-        self.assertTrue(client._get_app_default_called)
-
-
 class Test_implicit_behavior(unittest2.TestCase):
 
-    def test__require_dataset_value_unset(self):
+    def test__require_dataset_id_value_unset(self):
         import gcloud.datastore
         from gcloud.datastore import _implicit_environ
         from gcloud._testing import _Monkey
 
-        with _Monkey(_implicit_environ, DATASET=None):
+        with _Monkey(_implicit_environ, DATASET_ID=None):
             with self.assertRaises(EnvironmentError):
-                gcloud.datastore._require_dataset()
+                gcloud.datastore._require_dataset_id()
 
-    def test__require_dataset_value_set(self):
+    def test__require_dataset_id_value_set(self):
         import gcloud.datastore
         from gcloud.datastore import _implicit_environ
         from gcloud._testing import _Monkey
 
-        FAKE_DATASET = object()
-        with _Monkey(_implicit_environ, DATASET=FAKE_DATASET):
-            stored_dataset = gcloud.datastore._require_dataset()
-        self.assertTrue(stored_dataset is FAKE_DATASET)
+        FAKE_DATASET_ID = object()
+        with _Monkey(_implicit_environ, DATASET_ID=FAKE_DATASET_ID):
+            stored_dataset_id = gcloud.datastore._require_dataset_id()
+        self.assertTrue(stored_dataset_id is FAKE_DATASET_ID)
 
     def test__require_connection_value_unset(self):
         import gcloud.datastore
@@ -309,7 +279,6 @@ def test_get_entities_implicit(self):
         from gcloud.datastore import _implicit_environ
         from gcloud.datastore.key import Key
         from gcloud.datastore.test_connection import _Connection
-        from gcloud.datastore.test_entity import _Dataset
         from gcloud._testing import _Monkey
 
         DATASET_ID = 'DATASET'
@@ -323,11 +292,10 @@ def test_get_entities_implicit(self):
 
         # Make a connection to return the entity pb.
         CUSTOM_CONNECTION = _Connection(entity_pb)
-        CUSTOM_DATASET = _Dataset()
 
         key = Key(KIND, ID, dataset_id=DATASET_ID)
-        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET,
-                     CONNECTION=CUSTOM_CONNECTION):
+        with _Monkey(_implicit_environ, CONNECTION=CUSTOM_CONNECTION,
+                     DATASET_ID=DATASET_ID):
             result, = self._callFUT([key])
 
         expected_called_with = {
@@ -375,14 +343,12 @@ def test_allocate_ids_implicit(self):
         from gcloud.datastore import _implicit_environ
         from gcloud.datastore.key import Key
         from gcloud.datastore.test_connection import _Connection
-        from gcloud.datastore.test_entity import _Dataset
         from gcloud._testing import _Monkey
 
-        CUSTOM_DATASET = _Dataset()
         CUSTOM_CONNECTION = _Connection()
         NUM_IDS = 2
-        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET,
-                     CONNECTION=CUSTOM_CONNECTION):
+        with _Monkey(_implicit_environ, CONNECTION=CUSTOM_CONNECTION,
+                     DATASET_ID='DATASET'):
             INCOMPLETE_KEY = Key('KIND')
             result = self._callFUT(INCOMPLETE_KEY, NUM_IDS)
 
@@ -393,13 +359,11 @@ def test_allocate_ids_with_complete(self):
         from gcloud.datastore import _implicit_environ
         from gcloud.datastore.key import Key
         from gcloud.datastore.test_connection import _Connection
-        from gcloud.datastore.test_entity import _Dataset
         from gcloud._testing import _Monkey
 
-        CUSTOM_DATASET = _Dataset()
         CUSTOM_CONNECTION = _Connection()
-        with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET,
-                     CONNECTION=CUSTOM_CONNECTION):
+        with _Monkey(_implicit_environ, CONNECTION=CUSTOM_CONNECTION,
+                     DATASET_ID='DATASET'):
             COMPLETE_KEY = Key('KIND', 1234)
             self.assertRaises(ValueError, self._callFUT, COMPLETE_KEY, 2)
 
diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py
deleted file mode 100644
index 6c0a2a3fa0d2..000000000000
--- a/gcloud/datastore/test_dataset.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest2
-
-
-class TestDataset(unittest2.TestCase):
-
-    def _getTargetClass(self):
-        from gcloud.datastore.dataset import Dataset
-        return Dataset
-
-    def _makeOne(self, *args, **kw):
-        return self._getTargetClass()(*args, **kw)
-
-    def test_ctor_missing_dataset_id(self):
-        self.assertRaises(TypeError, self._makeOne)
-
-    def test_ctor_defaults(self):
-        DATASET_ID = 'DATASET'
-        dataset = self._makeOne(DATASET_ID)
-        self.assertEqual(dataset.id(), DATASET_ID)
-        self.assertEqual(dataset.connection(), None)
-
-    def test_ctor_explicit(self):
-        DATASET_ID = 'DATASET'
-        CONNECTION = object()
-        dataset = self._makeOne(DATASET_ID, CONNECTION)
-        self.assertEqual(dataset.id(), DATASET_ID)
-        self.assertTrue(dataset.connection() is CONNECTION)
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index 4a54ce7198be..b3b2ecd32831 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -23,13 +23,11 @@ class TestEntity(unittest2.TestCase):
 
     def setUp(self):
         from gcloud.datastore import _implicit_environ
-        self._replaced_dataset = _implicit_environ.DATASET
         self._replaced_dataset_id = _implicit_environ.DATASET_ID
-        _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None
+        _implicit_environ.DATASET_ID = None
 
     def tearDown(self):
         from gcloud.datastore import _implicit_environ
-        _implicit_environ.DATASET = self._replaced_dataset
         _implicit_environ.DATASET_ID = self._replaced_dataset_id
 
     def _getTargetClass(self):
@@ -193,16 +191,6 @@ def get(self, connection=None):
         return self._stored
 
 
-class _Dataset(dict):
-
-    def __init__(self, connection=None):
-        super(_Dataset, self).__init__()
-        self._connection = connection
-
-    def id(self):
-        return _DATASET_ID
-
-
 class _Connection(object):
     _transaction = _saved = _deleted = None
     _save_result = (False, None)
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index 5809a427f24d..4044d4889fb0 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -19,20 +19,18 @@ class Test_entity_from_protobuf(unittest2.TestCase):
 
     def setUp(self):
         from gcloud.datastore import _implicit_environ
-        self._replaced_dataset = _implicit_environ.DATASET
         self._replaced_dataset_id = _implicit_environ.DATASET_ID
-        _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None
+        _implicit_environ.DATASET_ID = None
 
     def tearDown(self):
         from gcloud.datastore import _implicit_environ
-        _implicit_environ.DATASET = self._replaced_dataset
         _implicit_environ.DATASET_ID = self._replaced_dataset_id
 
     def _callFUT(self, val):
         from gcloud.datastore.helpers import entity_from_protobuf
         return entity_from_protobuf(val)
 
-    def test_wo_dataset(self):
+    def test_wo_dataset_id(self):
         from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
         _DATASET_ID = 'DATASET'
@@ -53,7 +51,7 @@ def test_wo_dataset(self):
         self.assertEqual(key.kind, _KIND)
         self.assertEqual(key.id, _ID)
 
-    def test_w_dataset(self):
+    def test_w_dataset_id(self):
         from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
         _DATASET_ID = 'DATASET'
@@ -471,7 +469,7 @@ def _callFUT(self, key_pb):
 
         return _prepare_key_for_request(key_pb)
 
-    def test_prepare_dataset_valid(self):
+    def test_prepare_dataset_id_valid(self):
         from gcloud.datastore import datastore_v1_pb2 as datastore_pb
         key = datastore_pb.Key()
         key.partition_id.dataset_id = 'foo'
@@ -482,7 +480,7 @@ def test_prepare_dataset_valid(self):
         new_key.ClearField('partition_id')
         self.assertEqual(new_key, key_without)
 
-    def test_prepare_dataset_unset(self):
+    def test_prepare_dataset_id_unset(self):
         from gcloud.datastore import datastore_v1_pb2 as datastore_pb
         key = datastore_pb.Key()
         new_key = self._callFUT(key)
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index beb6fd92ef4a..f8dbde38ca63 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -21,21 +21,18 @@ def setUp(self):
         self._DEFAULT_DATASET = 'DATASET'
 
         from gcloud.datastore import _implicit_environ
-        self._replaced_dataset = _implicit_environ.DATASET
         self._replaced_dataset_id = _implicit_environ.DATASET_ID
-        _implicit_environ.DATASET = _implicit_environ.DATASET_ID = None
+        _implicit_environ.DATASET_ID = None
 
     def tearDown(self):
         from gcloud.datastore import _implicit_environ
-        _implicit_environ.DATASET = self._replaced_dataset
         _implicit_environ.DATASET_ID = self._replaced_dataset_id
 
     def _getTargetClass(self):
         from gcloud.datastore import _implicit_environ
-        from gcloud.datastore.dataset import Dataset
         from gcloud.datastore.key import Key
 
-        _implicit_environ.DATASET = Dataset(self._DEFAULT_DATASET)
+        _implicit_environ.DATASET_ID = self._DEFAULT_DATASET
         return Key
 
     def _makeOne(self, *args, **kwargs):
@@ -44,11 +41,11 @@ def _makeOne(self, *args, **kwargs):
     def test_ctor_empty(self):
         self.assertRaises(ValueError, self._makeOne)
 
-    def test_ctor_no_dataset(self):
+    def test_ctor_no_dataset_id(self):
         from gcloud._testing import _Monkey
         from gcloud.datastore import _implicit_environ
         klass = self._getTargetClass()
-        with _Monkey(_implicit_environ, DATASET=None):
+        with _Monkey(_implicit_environ, DATASET_ID=None):
             self.assertRaises(ValueError, klass, 'KIND')
 
     def test_ctor_parent(self):
@@ -171,7 +168,7 @@ def test_to_protobuf_defaults(self):
         self.assertEqual(elem.id, 0)
         self.assertFalse(elem.HasField('id'))
 
-    def test_to_protobuf_w_explicit_dataset(self):
+    def test_to_protobuf_w_explicit_dataset_id(self):
         _DATASET = 'DATASET-ALT'
         key = self._makeOne('KIND', dataset_id=_DATASET)
         pb = key.to_protobuf()
diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py
index 985b6e467cf2..60808850fa58 100644
--- a/gcloud/datastore/test_transaction.py
+++ b/gcloud/datastore/test_transaction.py
@@ -29,7 +29,7 @@ def _makeOne(self, dataset_id=None, connection=None):
 
     def test_ctor_missing_required(self):
         from gcloud.datastore import _implicit_environ
-        self.assertEqual(_implicit_environ.DATASET, None)
+        self.assertEqual(_implicit_environ.DATASET_ID, None)
 
         with self.assertRaises(ValueError):
             self._makeOne()
diff --git a/regression/datastore.py b/regression/datastore.py
index 05138a961e75..a6e78449b549 100644
--- a/regression/datastore.py
+++ b/regression/datastore.py
@@ -28,7 +28,7 @@
 
 
 DATASET_ID = os.getenv('GCLOUD_TESTS_DATASET_ID')
-datastore.set_default_dataset(dataset_id=DATASET_ID)
+datastore.set_default_dataset_id(dataset_id=DATASET_ID)
 datastore.set_default_connection()
 
 
diff --git a/regression/regression_utils.py b/regression/regression_utils.py
index 69eea2b1f932..d0cd1cdd3b0f 100644
--- a/regression/regression_utils.py
+++ b/regression/regression_utils.py
@@ -16,7 +16,6 @@
 import os
 import sys
 
-from gcloud import datastore
 from gcloud import storage
 
 
@@ -49,16 +48,6 @@ def get_environ(require_datastore=False, require_storage=False):
     }
 
 
-def get_dataset():
-    environ = get_environ(require_datastore=True)
-    dataset_id = environ['dataset_id']
-    key = ('get_dataset', dataset_id)
-    if key not in CACHED_RETURN_VALS:
-        # Cache return value for the environment.
-        CACHED_RETURN_VALS[key] = datastore.get_dataset(dataset_id)
-    return CACHED_RETURN_VALS[key]
-
-
 def get_storage_connection():
     environ = get_environ(require_storage=True)
     project_id = environ['project_id']
diff --git a/run_pylint.py b/run_pylint.py
index 67dfd268f282..3fc95f8269e1 100644
--- a/run_pylint.py
+++ b/run_pylint.py
@@ -125,12 +125,9 @@ def get_files_for_linting():
         a remote branch to diff against.
     """
     diff_base = None
-    if (os.getenv('TRAVIS_BRANCH') == 'master' and
-            os.getenv('TRAVIS_PULL_REQUEST') != 'false'):
-        # In the case of a pull request into master, we want to
-        # diff against HEAD in master.
-        diff_base = 'origin/master'
-    elif os.getenv('TRAVIS') is None:
+    # Temporary turning Travis diffbase off since pylint will fail on a
+    # deleted file that shows up in a diff.
+    if os.getenv('TRAVIS') is None:
         # Only allow specified remote and branch in local dev.
         remote = os.getenv('GCLOUD_REMOTE_FOR_LINT')
         branch = os.getenv('GCLOUD_BRANCH_FOR_LINT')
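
For reference, a minimal usage sketch of the datastore API after this change,
written in the doctest style the package's own docstrings use. It assumes the
GCLOUD_DATASET_ID environment variable is exported and that implied
application credentials are available; the kind 'TestKind' and the ID 1234
are illustrative values only, not part of the patch:

    >>> from gcloud import datastore
    >>> from gcloud.datastore.key import Key
    >>> datastore.set_default_dataset_id()  # falls back to GCLOUD_DATASET_ID
    >>> datastore.set_default_connection()  # builds a Connection from implied credentials
    >>> key = Key('TestKind', 1234)         # dataset ID implied from the environment
    >>> datastore.get_entities([key])
    [...]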