Lazy load defaults #657

Closed · wants to merge 4 commits
36 changes: 2 additions & 34 deletions gcloud/datastore/__init__.py
@@ -46,10 +46,9 @@
when race conditions may occur.
"""

import os

from gcloud import credentials
from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import set_default_dataset_id
from gcloud.datastore.api import allocate_ids
from gcloud.datastore.api import delete
from gcloud.datastore.api import get
@@ -66,37 +65,6 @@
'https://www.googleapis.com/auth/userinfo.email')
"""The scopes required for authenticating as a Cloud Datastore consumer."""

_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'


def set_default_dataset_id(dataset_id=None):
"""Set default dataset ID either explicitly or implicitly as fall-back.

In implicit case, supports three cases. In order of precedence, the
implicit cases are:
- GCLOUD_DATASET_ID environment variable
- Google App Engine application ID
- Google Compute Engine project ID (from metadata server)

:type dataset_id: string
:param dataset_id: Optional. The dataset ID to use as default.

:raises: :class:`EnvironmentError` if no dataset ID was implied.
"""
if dataset_id is None:
dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)

if dataset_id is None:
dataset_id = _implicit_environ.app_engine_id()

if dataset_id is None:
dataset_id = _implicit_environ.compute_engine_id()

if dataset_id is not None:
_implicit_environ.DATASET_ID = dataset_id
else:
raise EnvironmentError('No dataset ID could be inferred.')


def set_default_connection(connection=None):
"""Set default connection either explicitly or implicitly as fall-back.
@@ -105,7 +73,7 @@ def set_default_connection(connection=None):
:param connection: A connection provided to be the default.
"""
connection = connection or get_connection()
_implicit_environ.CONNECTION = connection
_implicit_environ.DEFAULT_ENVIRON.connection = connection


def set_defaults(dataset_id=None, connection=None):
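Note (not part of the diff): a brief usage sketch of the helper this module now re-exports, assuming gcloud.datastore exposes set_default_dataset_id exactly as imported above.

# Hypothetical usage sketch; assumes GCLOUD_DATASET_ID (or an App Engine /
# Compute Engine environment) is available for the implicit branch.
from gcloud import datastore

# Explicit: pin the default dataset ID directly.
datastore.set_default_dataset_id('my-dataset-id')

# Implicit: fall back to GCLOUD_DATASET_ID, then the App Engine app ID, then
# the Compute Engine project ID; raises EnvironmentError if none is found.
datastore.set_default_dataset_id()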
88 changes: 82 additions & 6 deletions gcloud/datastore/_implicit_environ.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module to provide implicit behavior based on enviroment.
"""Module to provide implicit behavior based on environment.

Acts as a mutable namespace to allow the datastore package to
imply the current dataset ID and connection from the enviroment.
imply the current dataset ID and connection from the environment.
"""

import os
import socket

from six.moves.http_client import HTTPConnection # pylint: disable=F0401
@@ -28,11 +29,34 @@
app_identity = None


DATASET_ID = None
"""Module global to allow persistent implied dataset ID from enviroment."""
_DATASET_ENV_VAR_NAME = 'GCLOUD_DATASET_ID'

CONNECTION = None
"""Module global to allow persistent implied connection from enviroment."""

class _DatasetIDProperty(object):
"""Descriptor for lazy loaded dataset ID."""

def __get__(self, obj, objtype):
if obj is None or objtype is not Environment:
return self

obj.dataset_id = get_default_dataset_id()

return obj.dataset_id


class Environment(object):
"""Container for environment settings.

:type dataset_id: string
:param dataset_id: Persistent implied dataset ID from environment.

:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Persistent implied connection from environment.
"""

dataset_id = _DatasetIDProperty()

def __init__(self):
self.connection = None


def app_engine_id():
@@ -79,3 +103,55 @@ def compute_engine_id():
pass
finally:
connection.close()


def get_default_dataset_id(dataset_id=None):
"""Get default dataset ID either explicitly or implicitly as fall-back.

In implicit case, supports three cases. In order of precedence, the
implicit cases are:
- GCLOUD_DATASET_ID environment variable
- Google App Engine application ID
- Google Compute Engine project ID (from metadata server)

:type dataset_id: string
:param dataset_id: Optional. The dataset ID to use as default.

:rtype: string or ``NoneType``
:returns: The inferred dataset or None.
"""
if dataset_id is None:
dataset_id = os.getenv(_DATASET_ENV_VAR_NAME)

if dataset_id is None:
dataset_id = app_engine_id()

if dataset_id is None:
dataset_id = compute_engine_id()

return dataset_id


def set_default_dataset_id(dataset_id=None):
"""Set default dataset ID either explicitly or implicitly as fall-back.

In implicit case, supports three cases. In order of precedence, the
implicit cases are:
- GCLOUD_DATASET_ID environment variable
- Google App Engine application ID
- Google Compute Engine project ID (from metadata server)

:type dataset_id: string
:param dataset_id: Optional. The dataset ID to use as default.

:raises: :class:`EnvironmentError` if no dataset ID was implied.
"""
dataset_id = get_default_dataset_id(dataset_id=dataset_id)

if dataset_id is not None:
DEFAULT_ENVIRON.dataset_id = dataset_id
else:
raise EnvironmentError('No dataset ID could be inferred.')


DEFAULT_ENVIRON = Environment()
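Note (not part of the diff): a minimal, standalone sketch of the non-data-descriptor pattern that _DatasetIDProperty uses above; the names here are illustrative, only the mechanism matches, and the inference stand-in checks just the environment variable.

import os


def _infer_dataset_id():
    # Stand-in for get_default_dataset_id(); this sketch only checks the
    # environment variable, not App Engine or Compute Engine.
    return os.getenv('GCLOUD_DATASET_ID')


class _LazyDatasetID(object):
    """Non-data descriptor: resolve the dataset ID on first access, then cache."""

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        # Writing to the instance shadows this class-level descriptor, so the
        # inference below runs at most once per Environment instance.
        obj.dataset_id = _infer_dataset_id()
        return obj.dataset_id


class Environment(object):
    dataset_id = _LazyDatasetID()

    def __init__(self):
        self.connection = None


env = Environment()
print(env.dataset_id)  # first access triggers inference; later reads hit the cache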
10 changes: 5 additions & 5 deletions gcloud/datastore/api.py
@@ -18,7 +18,7 @@
Query objects rather than via protobufs.
"""

from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import DEFAULT_ENVIRON
from gcloud.datastore.batch import Batch
from gcloud.datastore.transaction import Transaction
from gcloud.datastore import helpers
@@ -57,9 +57,9 @@ def _require_dataset_id(dataset_id=None, first_key=None):
return top.dataset_id
if first_key is not None:
return first_key.dataset_id
if _implicit_environ.DATASET_ID is None:
if DEFAULT_ENVIRON.dataset_id is None:
raise EnvironmentError('Dataset ID could not be inferred.')
return _implicit_environ.DATASET_ID
return DEFAULT_ENVIRON.dataset_id


def _require_connection(connection=None):
@@ -78,9 +78,9 @@ def _require_connection(connection=None):
if top is not None:
connection = top.connection
else:
if _implicit_environ.CONNECTION is None:
if DEFAULT_ENVIRON.connection is None:
raise EnvironmentError('Connection could not be inferred.')
connection = _implicit_environ.CONNECTION
connection = DEFAULT_ENVIRON.connection
return connection


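Note (not part of the diff): a hedged exercise of the two private helpers above, assuming no batch or transaction is active so the implicit environment is consulted.

# Hypothetical; relies only on names visible in this diff.
from gcloud.datastore import _implicit_environ
from gcloud.datastore import api

# Mirror what set_default_dataset_id() does: stash an ID on the default
# environment, shadowing the lazy descriptor.
_implicit_environ.DEFAULT_ENVIRON.dataset_id = 'my-dataset-id'

print(api._require_dataset_id())   # -> 'my-dataset-id'

# With no default connection set, the connection helper should raise.
try:
    api._require_connection()
except EnvironmentError as exc:
    print(exc)                      # 'Connection could not be inferred.'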
6 changes: 3 additions & 3 deletions gcloud/datastore/batch.py
@@ -15,7 +15,7 @@
"""Create / interact with a batch of updates / deletes."""

from gcloud._localstack import _LocalStack
from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import DEFAULT_ENVIRON
from gcloud.datastore import helpers
from gcloud.datastore.key import _dataset_ids_equal
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
@@ -70,8 +70,8 @@ def __init__(self, dataset_id=None, connection=None):
:raises: :class:`ValueError` if either a connection or dataset ID
are not set.
"""
self._connection = connection or _implicit_environ.CONNECTION
self._dataset_id = dataset_id or _implicit_environ.DATASET_ID
self._connection = connection or DEFAULT_ENVIRON.connection
self._dataset_id = dataset_id or DEFAULT_ENVIRON.dataset_id

if self._connection is None or self._dataset_id is None:
raise ValueError('A batch must have a connection and '
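Note (not part of the diff): the fall-back in Batch.__init__ above fails loudly when nothing can be inferred; a hypothetical illustration, assuming no defaults are set and none can be inferred from the environment.

# Hypothetical; with no explicit arguments, nothing in DEFAULT_ENVIRON, and no
# ID inferable from the environment, construction raises ValueError.
from gcloud.datastore.batch import Batch

try:
    Batch()
except ValueError as exc:
    print(exc)  # 'A batch must have a connection and ...'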
6 changes: 3 additions & 3 deletions gcloud/datastore/key.py
@@ -17,7 +17,7 @@
import copy
import six

from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import DEFAULT_ENVIRON
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb


@@ -400,10 +400,10 @@ def _validate_dataset_id(dataset_id, parent):

if dataset_id is None:

if _implicit_environ.DATASET_ID is None:
if DEFAULT_ENVIRON.dataset_id is None:
raise ValueError("A Key must have a dataset ID set.")

dataset_id = _implicit_environ.DATASET_ID
dataset_id = DEFAULT_ENVIRON.dataset_id

return dataset_id

6 changes: 3 additions & 3 deletions gcloud/datastore/query.py
@@ -16,7 +16,7 @@

import base64

from gcloud.datastore import _implicit_environ
from gcloud.datastore._implicit_environ import DEFAULT_ENVIRON
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers
from gcloud.datastore.key import Key
@@ -80,7 +80,7 @@ def __init__(self,
group_by=()):

if dataset_id is None:
dataset_id = _implicit_environ.DATASET_ID
dataset_id = DEFAULT_ENVIRON.dataset_id

if dataset_id is None:
raise ValueError("No dataset ID supplied, and no default set.")
@@ -326,7 +326,7 @@ def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
default has been set.
"""
if connection is None:
connection = _implicit_environ.CONNECTION
connection = DEFAULT_ENVIRON.connection

if connection is None:
raise ValueError("No connection passed, and no default set")
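Note (not part of the diff): the same fall-back in Query; dataset_id= comes from this diff, while the kind= keyword below is an assumption about the rest of the constructor signature.

# Hypothetical; with no dataset ID passed and none inferable from the
# environment, the check above raises ValueError.
from gcloud.datastore.query import Query

query = Query(kind='Person', dataset_id='my-dataset-id')

try:
    Query(kind='Person')  # falls back to DEFAULT_ENVIRON.dataset_id
except ValueError:
    print('No dataset ID supplied, and no default set.')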