diff --git a/.jscsrc b/.jscsrc
index 9d2a5011ab78..699f2ea283dc 100644
--- a/.jscsrc
+++ b/.jscsrc
@@ -20,6 +20,7 @@
"allowUrlComments": true
},
"excludeFiles": [
+ "lib/datastore/proto.js",
"system-test/data/*",
"test/testdata/*"
]
diff --git a/.jshintignore b/.jshintignore
new file mode 100644
index 000000000000..200a9333a5ee
--- /dev/null
+++ b/.jshintignore
@@ -0,0 +1,3 @@
+lib/datastore/proto.js
+system-test/data/*
+test/testdata/*
diff --git a/docs/site/components/docs/datastore-overview.html b/docs/site/components/docs/datastore-overview.html
index 66b56d30b138..23bbdbd43c43 100644
--- a/docs/site/components/docs/datastore-overview.html
+++ b/docs/site/components/docs/datastore-overview.html
@@ -1,6 +1,6 @@
Datastore Overview
- The <code>gcloud.datastore</code> object gives you some convenience methods, as well as exposes a <code>dataset</code> function. This will allow you to create a <code>dataset</code>, which is the object from which you will interact with the Google Cloud Datastore.
+ The <code>gcloud.datastore</code> object allows you to interact with Google Cloud Datastore.
To learn more about Datastore, read the Google Cloud Datastore Concepts Overview.
diff --git a/docs/site/components/docs/docs-values.js b/docs/site/components/docs/docs-values.js
index ace3d321cb56..da4ead6ad0af 100644
--- a/docs/site/components/docs/docs-values.js
+++ b/docs/site/components/docs/docs-values.js
@@ -116,6 +116,21 @@ angular.module('gcloud.docs')
]
},
+ datastorev1Beta3: {
+ title: 'Datastore',
+ _url: '{baseUrl}/datastore',
+ pages: [
+ {
+ title: 'Transaction',
+ url: '/transaction'
+ },
+ {
+ title: 'Query',
+ url: '/query'
+ }
+ ]
+ },
+
dns: {
title: 'DNS',
_url: '{baseUrl}/dns',
@@ -218,8 +233,11 @@ angular.module('gcloud.docs')
// deprecate old datastore api.
'<0.8.0': ['datastore'],
- // introduce datastore refactor + pubsub.
- '>=0.8.0': ['datastoreWithTransaction', 'pubsub'],
+ // introduce datastore refactor.
+ '>=0.8.0 <0.24.0': ['datastoreWithTransaction'],
+
+ // introduce pubsub api.
+ '>=0.8.0': ['pubsub'],
// deprecate old storage api.
'<0.9.0': ['storage'],
@@ -240,6 +258,12 @@ angular.module('gcloud.docs')
'>=0.20.0': ['compute'],
// introduce resource api.
- '>=0.22.0': ['resource']
+ '>=0.22.0': ['resource'],
+
+ // deprecate datastore v1beta2 api.
+ '<0.24.0': ['datastoreWithTransaction'],
+
+ // introduce datastore v1beta3 api.
+ '>=0.24.0': ['datastorev1Beta3']
}
});
diff --git a/lib/datastore/dataset.js b/lib/datastore/dataset.js
deleted file mode 100644
index f149f7405281..000000000000
--- a/lib/datastore/dataset.js
+++ /dev/null
@@ -1,272 +0,0 @@
-/*!
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*!
- * @module datastore/dataset
- */
-
-'use strict';
-
-var arrify = require('arrify');
-var is = require('is');
-var nodeutil = require('util');
-
-/**
- * @type {module:datastore/entity}
- * @private
- */
-var entity = require('./entity.js');
-
-/**
- * @type {module:datastore/query}
- * @private
- */
-var Query = require('./query.js');
-
-/**
- * @type {module:datastore/transaction}
- * @private
- */
-var Transaction = require('./transaction.js');
-
-/**
- * @type {module:common/util}
- * @private
- */
-var util = require('../common/util.js');
-
-/**
- * @type {module:datastore/request}
- * @private
- */
-var DatastoreRequest = require('./request.js');
-
-/**
- * Scopes for Google Datastore access.
- * @const {array} SCOPES
- * @private
- */
-var SCOPES = [
- 'https://www.googleapis.com/auth/datastore',
- 'https://www.googleapis.com/auth/userinfo.email'
-];
-
-/**
- * Interact with a dataset from the
- * [Google Cloud Datastore](https://developers.google.com/datastore/).
- *
- * @constructor
- * @alias module:datastore/dataset
- * @mixes module:datastore/request
- *
- * @param {object=} options - [Configuration object](#/docs/?method=gcloud).
- * @param {string=} options.apiEndpoint - Override the default API endpoint used
- * to reach Datastore. This is useful for connecting to your local Datastore
- * server (usually "http://localhost:8080").
- * @param {string} options.namespace - Namespace to isolate transactions to.
- *
- * @example
- * var datastore = gcloud.datastore;
- *
- * var dataset = datastore.dataset({
- * projectId: 'my-project',
- * keyFilename: '/path/to/keyfile.json'
- * });
- *
- * //-
- * // Connect to your local Datastore server.
- * //-
- * var dataset = datastore.dataset({
- * projectId: 'my-project',
- * apiEndpoint: 'http://localhost:8080'
- * });
- *
- * //-
- * // The `process.env.DATASTORE_HOST` environment variable is also recognized.
- * // If set, you may omit the `apiEndpoint` option.
- * //-
- */
-function Dataset(options) {
- if (!(this instanceof Dataset)) {
- return new Dataset(options);
- }
-
- options = options || {};
-
- if (!options.projectId) {
- throw util.missingProjectIdError;
- }
-
- this.makeAuthenticatedRequest_ = util.makeAuthenticatedRequestFactory({
- customEndpoint: typeof options.apiEndpoint !== 'undefined',
- credentials: options.credentials,
- keyFile: options.keyFilename,
- scopes: SCOPES,
- email: options.email
- });
-
- this.apiEndpoint = Dataset.determineApiEndpoint_(options);
- this.namespace = options.namespace;
- this.projectId = options.projectId;
-}
-
-nodeutil.inherits(Dataset, DatastoreRequest);
-
-/**
- * Determine the appropriate endpoint to use for API requests. If not explicitly
- * defined, check for the "DATASTORE_HOST" environment variable, used to connect
- * to a local Datastore server.
- *
- * @private
- *
- * @param {object} options - Configuration object.
- * @param {string=} options.apiEndpoint - Custom API endpoint.
- */
-Dataset.determineApiEndpoint_ = function(options) {
- var apiEndpoint = 'https://www.googleapis.com';
- var trailingSlashes = new RegExp('/*$');
-
- if (options.apiEndpoint) {
- apiEndpoint = options.apiEndpoint;
- } else if (process.env.DATASTORE_HOST) {
- apiEndpoint = process.env.DATASTORE_HOST;
- }
-
- if (apiEndpoint.indexOf('http') !== 0) {
- apiEndpoint = 'http://' + apiEndpoint;
- }
-
- return apiEndpoint.replace(trailingSlashes, '');
-};
-
-/**
- * Helper to create a Key object, scoped to the dataset's namespace by default.
- *
- * You may also specify a configuration object to define a namespace and path.
- *
- * @param {...*=} options - Key path. To specify or override a namespace,
- * you must use an object here to explicitly state it.
- * @param {object=} options - Configuration object.
- * @param {...*=} options.path - Key path.
- * @param {string=} options.namespace - Optional namespace.
- * @return {Key} A newly created Key from the options given.
- *
- * @example
- * var key;
- *
- * // Create an incomplete key from the dataset namespace, kind='Company'
- * key = dataset.key('Company');
- *
- * // A complete key from the dataset namespace, kind='Company', id=123
- * key = dataset.key(['Company', 123]);
- *
- * // A complete key from the dataset namespace, kind='Company', name='Google'
- * // Note: `id` is used for numeric identifiers and `name` is used otherwise
- * key = dataset.key(['Company', 'Google']);
- *
- * // A complete key from a provided namespace and path.
- * key = dataset.key({
- * namespace: 'My-NS',
- * path: ['Company', 123]
- * });
- */
-Dataset.prototype.key = function(options) {
- options = is.object(options) ? options : {
- namespace: this.namespace,
- path: arrify(options)
- };
-
- return new entity.Key(options);
-};
-
-/**
- * Create a query from the current dataset to query the specified kind, scoped
- * to the namespace provided at the initialization of the dataset.
- *
- * @resource [Datastore Queries]{@link http://goo.gl/Cag0r6}
- *
- * @borrows {module:datastore/query} as createQuery
- * @see {module:datastore/query}
- *
- * @param {string=} namespace - Optional namespace.
- * @param {string} kind - Kind to query.
- * @return {module:datastore/query}
- */
-Dataset.prototype.createQuery = function(namespace, kind) {
- if (arguments.length === 1) {
- kind = arrify(namespace);
- namespace = this.namespace;
- }
-
- return new Query(namespace, arrify(kind));
-};
-
-/**
- * Run a function in the context of a new transaction. Transactions allow you to
- * perform multiple operations, committing your changes atomically. When you are
- * finished making your changes within the transaction, run the done() function
- * provided in the callback function to commit your changes. See an example
- * below for more information.
- *
- * @resource [Datasets: beginTransaction API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/beginTransaction}
- *
- * @borrows {module:datastore/transaction#begin} as runInTransaction
- *
- * @param {function} fn - The function to run in the context of a transaction.
- * @param {module:datastore/transaction} fn.transaction - The Transaction.
- * @param {function} fn.done - Function used to commit changes.
- * @param {function} callback - The callback function.
- * @param {?error} callback.err - An error returned while making this request
- *
- *
- * @example
- * dataset.runInTransaction(function(transaction, done) {
- * // From the `transaction` object, execute dataset methods as usual.
- * // Call `done` when you're ready to commit all of the changes.
- * transaction.get(dataset.key(['Company', 123]), function(err, entity) {
- * if (err) {
- * transaction.rollback(done);
- * return;
- * }
- *
- * done();
- * });
- * }, function(err, apiResponse) {});
- */
-Dataset.prototype.runInTransaction = function(fn, callback) {
- var newTransaction = this.createTransaction_();
-
- newTransaction.begin_(function(err, resp) {
- if (err) {
- callback(err, resp);
- return;
- }
-
- fn(newTransaction, newTransaction.commit_.bind(newTransaction, callback));
- });
-};
-
-/**
- * Create a new Transaction object using the existing connection and dataset.
- *
- * @return {module:datastore/transaction}
- * @private
- */
-Dataset.prototype.createTransaction_ = function() {
- return new Transaction(this, this.projectId);
-};
-
-module.exports = Dataset;
diff --git a/lib/datastore/datastore_v1.proto b/lib/datastore/datastore_v1.proto
deleted file mode 100644
index bb4c199b116c..000000000000
--- a/lib/datastore/datastore_v1.proto
+++ /dev/null
@@ -1,594 +0,0 @@
-// Copyright 2013 Google Inc. All Rights Reserved.
-//
-// The datastore v1 service proto definitions
-
-syntax = "proto2";
-
-package pb;
-option java_package = "com.google.api.services.datastore";
-
-
-// An identifier for a particular subset of entities.
-//
-// Entities are partitioned into various subsets, each used by different
-// datasets and different namespaces within a dataset and so forth.
-//
-// All input partition IDs are normalized before use.
-// A partition ID is normalized as follows:
-// If the partition ID is unset or is set to an empty partition ID, replace it
-// with the context partition ID.
-// Otherwise, if the partition ID has no dataset ID, assign it the context
-// partition ID's dataset ID.
-// Unless otherwise documented, the context partition ID has the dataset ID set
-// to the context dataset ID and no other partition dimension set.
-//
-// A partition ID is empty if all of its fields are unset.
-//
-// Partition dimension:
-// A dimension may be unset.
-// A dimension's value must never be "".
-// A dimension's value must match [A-Za-z\d\.\-_]{1,100}
-// If the value of any dimension matches regex "__.*__",
-// the partition is reserved/read-only.
-// A reserved/read-only partition ID is forbidden in certain documented contexts.
-//
-// Dataset ID:
-// A dataset id's value must never be "".
-// A dataset id's value must match
-// ([a-z\d\-]{1,100}~)?([a-z\d][a-z\d\-\.]{0,99}:)?([a-z\d][a-z\d\-]{0,99}
-message PartitionId {
- // The dataset ID.
- optional string dataset_id = 3;
- // The namespace.
- optional string namespace = 4;
-}
-
-// A unique identifier for an entity.
-// If a key's partition id or any of its path kinds or names are
-// reserved/read-only, the key is reserved/read-only.
-// A reserved/read-only key is forbidden in certain documented contexts.
-message Key {
- // Entities are partitioned into subsets, currently identified by a dataset
- // (usually implicitly specified by the project) and namespace ID.
- // Queries are scoped to a single partition.
- optional PartitionId partition_id = 1;
-
- // A (kind, ID/name) pair used to construct a key path.
- //
- // At most one of name or ID may be set.
- // If either is set, the element is complete.
- // If neither is set, the element is incomplete.
- message PathElement {
- // The kind of the entity.
- // A kind matching regex "__.*__" is reserved/read-only.
- // A kind must not contain more than 500 characters.
- // Cannot be "".
- required string kind = 1;
- // The ID of the entity.
- // Never equal to zero. Values less than zero are discouraged and will not
- // be supported in the future.
- optional int64 id = 2;
- // The name of the entity.
- // A name matching regex "__.*__" is reserved/read-only.
- // A name must not be more than 500 characters.
- // Cannot be "".
- optional string name = 3;
- }
-
- // The entity path.
- // An entity path consists of one or more elements composed of a kind and a
- // string or numerical identifier, which identify entities. The first
- // element identifies a root entity, the second element identifies
- // a child of the root entity, the third element a child of the
- // second entity, and so forth. The entities identified by all prefixes of
- // the path are called the element's ancestors.
- // An entity path is always fully complete: ALL of the entity's ancestors
- // are required to be in the path along with the entity identifier itself.
- // The only exception is that in some documented cases, the identifier in the
- // last path element (for the entity) itself may be omitted. A path can never
- // be empty.
- repeated PathElement path_element = 2;
-}
-
-// A message that can hold any of the supported value types and associated
-// metadata.
-//
-// At most one of the Value fields may be set.
-// If none are set the value is "null".
-//
-message Value {
- // A boolean value.
- optional bool boolean_value = 1;
- // An integer value.
- optional int64 integer_value = 2;
- // A double value.
- optional double double_value = 3;
- // A timestamp value.
- optional int64 timestamp_microseconds_value = 4;
- // A key value.
- optional Key key_value = 5;
- // A blob key value.
- optional string blob_key_value = 16;
- // A UTF-8 encoded string value.
- optional string string_value = 17;
- // A blob value.
- optional bytes blob_value = 18;
- // An entity value.
- // May have no key.
- // May have a key with an incomplete key path.
- // May have a reserved/read-only key.
- optional Entity entity_value = 6;
- // A list value.
- // Cannot contain another list value.
- // Cannot also have a meaning and indexing set.
- repeated Value list_value = 7;
-
- // The <code>meaning</code> field is reserved and should not be used.
- optional int32 meaning = 14;
-
- // If the value should be indexed.
- //
- // The <code>indexed</code> property may be set for a
- // <code>null</code> value.
- // When <code>indexed</code> is <code>true</code>, <code>stringValue</code>
- // is limited to 500 characters and the blob value is limited to 500 bytes.
- // Exception: If meaning is set to 2, string_value is limited to 2038
- // characters regardless of indexed.
- // When indexed is true, meaning 15 and 22 are not allowed, and meaning 16
- // will be ignored on input (and will never be set on output).
- // Input values by default have <code>indexed</code> set to
- // <code>true</code>; however, you can explicitly set <code>indexed</code> to
- // <code>true</code> if you want. (An output value never has
- // <code>indexed</code> explicitly set to <code>true</code>.) If a value is
- // itself an entity, it cannot have <code>indexed</code> set to
- // <code>true</code>.
- // Exception: An entity value with meaning 9, 20 or 21 may be indexed.
- optional bool indexed = 15 [default = true];
-}
-
-// An entity property.
-message Property {
- // The name of the property.
- // A property name matching regex "__.*__" is reserved.
- // A reserved property name is forbidden in certain documented contexts.
- // The name must not contain more than 500 characters.
- // Cannot be "".
- required string name = 1;
-
- // The value(s) of the property.
- // Each value can have only one value property populated. For example,
- // you cannot have a values list of <code>{ value: { integerValue: 22,
- // stringValue: "a" } }</code>, but you can have <code>{ value: { listValue:
- // [ { integerValue: 22 }, { stringValue: "a" } ] }</code>.
- required Value value = 4;
-}
-
-// An entity.
-//
-// An entity is limited to 1 megabyte when stored. That roughly
-// corresponds to a limit of 1 megabyte for the serialized form of this
-// message.
-message Entity {
- // The entity's key.
- //
- // An entity must have a key, unless otherwise documented (for example,
- // an entity in <code>Value.entityValue</code> may have no key).
- // An entity's kind is its key's path's last element's kind,
- // or null if it has no key.
- optional Key key = 1;
- // The entity's properties.
- // Each property's name must be unique for its entity.
- repeated Property property = 2;
-}
-
-// The result of fetching an entity from the datastore.
-message EntityResult {
- // Specifies what data the 'entity' field contains.
- // A ResultType is either implied (for example, in LookupResponse.found it
- // is always FULL) or specified by context (for example, in message
- // QueryResultBatch, field 'entity_result_type' specifies a ResultType
- // for all the values in field 'entity_result').
- enum ResultType {
- FULL = 1; // The entire entity.
- PROJECTION = 2; // A projected subset of properties.
- // The entity may have no key.
- // A property value may have meaning 18.
- KEY_ONLY = 3; // Only the key.
- }
-
- // The resulting entity.
- required Entity entity = 1;
-}
-
-// A query.
-message Query {
- // The projection to return. If not set the entire entity is returned.
- repeated PropertyExpression projection = 2;
-
- // The kinds to query (if empty, returns entities from all kinds).
- repeated KindExpression kind = 3;
-
- // The filter to apply (optional).
- optional Filter filter = 4;
-
- // The order to apply to the query results (if empty, order is unspecified).
- repeated PropertyOrder order = 5;
-
- // The properties to group by (if empty, no grouping is applied to the
- // result set).
- repeated PropertyReference group_by = 6;
-
- // A starting point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ start_cursor = 7;
-
- // An ending point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ end_cursor = 8;
-
- // The number of results to skip. Applies before limit, but after all other
- // constraints (optional, defaults to 0).
- optional int32 offset = 10 [default=0];
-
- // The maximum number of results to return. Applies after all other
- // constraints. Optional.
- optional int32 limit = 11;
-}
-
-// A representation of a kind.
-message KindExpression {
- // The name of the kind.
- required string name = 1;
-}
-
-// A reference to a property relative to the kind expressions.
-// exactly.
-message PropertyReference {
- // The name of the property.
- required string name = 2;
-}
-
-// A representation of a property in a projection.
-message PropertyExpression {
- enum AggregationFunction {
- FIRST = 1;
- }
- // The property to project.
- required PropertyReference property = 1;
- // The aggregation function to apply to the property. Optional.
- // Can only be used when grouping by at least one property. Must
- // then be set on all properties in the projection that are not
- // being grouped by.
- optional AggregationFunction aggregation_function = 2;
-}
-
-// The desired order for a specific property.
-message PropertyOrder {
- enum Direction {
- ASCENDING = 1;
- DESCENDING = 2;
- }
- // The property to order by.
- required PropertyReference property = 1;
- // The direction to order by.
- optional Direction direction = 2 [default=ASCENDING];
-}
-
-// A holder for any type of filter. Exactly one field should be specified.
-message Filter {
- // A composite filter.
- optional CompositeFilter composite_filter = 1;
- // A filter on a property.
- optional PropertyFilter property_filter = 2;
-}
-
-// A filter that merges the multiple other filters using the given operation.
-message CompositeFilter {
- enum Operator {
- AND = 1;
- }
-
- // The operator for combining multiple filters.
- required Operator operator = 1;
- // The list of filters to combine.
- // Must contain at least one filter.
- repeated Filter filter = 2;
-}
-
-// A filter on a specific property.
-message PropertyFilter {
- enum Operator {
- LESS_THAN = 1;
- LESS_THAN_OR_EQUAL = 2;
- GREATER_THAN = 3;
- GREATER_THAN_OR_EQUAL = 4;
- EQUAL = 5;
-
- HAS_ANCESTOR = 11;
- }
-
- // The property to filter by.
- required PropertyReference property = 1;
- // The operator to filter by.
- required Operator operator = 2;
- // The value to compare the property to.
- required Value value = 3;
-}
-
-// A GQL query.
-message GqlQuery {
- required string query_string = 1;
- // When false, the query string must not contain a literal.
- optional bool allow_literal = 2 [default = false];
- // A named argument must set field GqlQueryArg.name.
- // No two named arguments may have the same name.
- // For each non-reserved named binding site in the query string,
- // there must be a named argument with that name,
- // but not necessarily the inverse.
- repeated GqlQueryArg name_arg = 3;
- // Numbered binding site @1 references the first numbered argument,
- // effectively using 1-based indexing, rather than the usual 0.
- // A numbered argument must NOT set field GqlQueryArg.name.
- // For each binding site numbered i in query_string,
- // there must be an ith numbered argument.
- // The inverse must also be true.
- repeated GqlQueryArg number_arg = 4;
-}
-
-// A binding argument for a GQL query.
-// Exactly one of fields value and cursor must be set.
-message GqlQueryArg {
- // Must match regex "[A-Za-z_$][A-Za-z_$0-9]*".
- // Must not match regex "__.*__".
- // Must not be "".
- optional string name = 1;
- optional Value value = 2;
- optional bytes cursor = 3;
-}
-
-// A batch of results produced by a query.
-message QueryResultBatch {
- // The possible values for the 'more_results' field.
- enum MoreResultsType {
- NOT_FINISHED = 1; // There are additional batches to fetch from this query.
- MORE_RESULTS_AFTER_LIMIT = 2; // The query is finished, but there are more
- // results after the limit.
- NO_MORE_RESULTS = 3; // The query has been exhausted.
- }
-
- // The result type for every entity in entityResults.
- required EntityResult.ResultType entity_result_type = 1;
- // The results for this batch.
- repeated EntityResult entity_result = 2;
-
- // A cursor that points to the position after the last result in the batch.
- // May be absent.
- optional bytes /* serialized QueryCursor */ end_cursor = 4;
-
- // The state of the query after the current batch.
- required MoreResultsType more_results = 5;
-
- // The number of results skipped because of <code>Query.offset</code>.
- optional int32 skipped_results = 6;
-}
-
-// A set of changes to apply.
-//
-// No entity in this message may have a reserved property name,
-// not even a property in an entity in a value.
-// No value in this message may have meaning 18,
-// not even a value in an entity in another value.
-//
-// If entities with duplicate keys are present, an arbitrary choice will
-// be made as to which is written.
-message Mutation {
- // Entities to upsert.
- // Each upserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity upsert = 1;
- // Entities to update.
- // Each updated entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity update = 2;
- // Entities to insert.
- // Each inserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity insert = 3;
- // Insert entities with a newly allocated ID.
- // Each inserted entity's key must omit the final identifier in its path and
- // must not be reserved/read-only.
- repeated Entity insert_auto_id = 4;
- // Keys of entities to delete.
- // Each key must have a complete key path and must not be reserved/read-only.
- repeated Key delete = 5;
- // Ignore a user specified read-only period. Optional.
- optional bool force = 6;
-}
-
-// The result of applying a mutation.
-message MutationResult {
- // Number of index writes.
- required int32 index_updates = 1;
- // Keys for <code>insertAutoId</code> entities. One per entity from the
- // request, in the same order.
- repeated Key insert_auto_id_key = 2;
-}
-
-// Options shared by read requests.
-message ReadOptions {
- enum ReadConsistency {
- DEFAULT = 0;
- STRONG = 1;
- EVENTUAL = 2;
- }
-
- // The read consistency to use.
- // Cannot be set when transaction is set.
- // Lookup and ancestor queries default to STRONG, global queries default to
- // EVENTUAL and cannot be set to STRONG.
- optional ReadConsistency read_consistency = 1 [default=DEFAULT];
-
- // The transaction to use. Optional.
- optional bytes /* serialized Transaction */ transaction = 2;
-}
-
-// The request for Lookup.
-message LookupRequest {
-
- // Options for this lookup request. Optional.
- optional ReadOptions read_options = 1;
- // Keys of entities to look up from the datastore.
- repeated Key key = 3;
-}
-
-// The response for Lookup.
-message LookupResponse {
-
- // The order of results in these fields is undefined and has no relation to
- // the order of the keys in the input.
-
- // Entities found as ResultType.FULL entities.
- repeated EntityResult found = 1;
-
- // Entities not found as ResultType.KEY_ONLY entities.
- repeated EntityResult missing = 2;
-
- // A list of keys that were not looked up due to resource constraints.
- repeated Key deferred = 3;
-}
-
-
-// The request for RunQuery.
-message RunQueryRequest {
-
- // The options for this query.
- optional ReadOptions read_options = 1;
-
- // Entities are partitioned into subsets, identified by a dataset (usually
- // implicitly specified by the project) and namespace ID. Queries are scoped
- // to a single partition.
- // This partition ID is normalized with the standard default context
- // partition ID, but all other partition IDs in RunQueryRequest are
- // normalized with this partition ID as the context partition ID.
- optional PartitionId partition_id = 2;
-
- // The query to run.
- // Either this field or field gql_query must be set, but not both.
- optional Query query = 3;
- // The GQL query to run.
- // Either this field or field query must be set, but not both.
- optional GqlQuery gql_query = 7;
-}
-
-// The response for RunQuery.
-message RunQueryResponse {
-
- // A batch of query results (always present).
- optional QueryResultBatch batch = 1;
-
-}
-
-// The request for BeginTransaction.
-message BeginTransactionRequest {
-
- enum IsolationLevel {
- SNAPSHOT = 0; // Read from a consistent snapshot. Concurrent transactions
- // conflict if their mutations conflict. For example:
- // Read(A),Write(B) may not conflict with Read(B),Write(A),
- // but Read(B),Write(B) does conflict with Read(B),Write(B).
- SERIALIZABLE = 1; // Read from a consistent snapshot. Concurrent
- // transactions conflict if they cannot be serialized.
- // For example Read(A),Write(B) does conflict with
- // Read(B),Write(A) but Read(A) may not conflict with
- // Write(A).
- }
-
- // The transaction isolation level.
- optional IsolationLevel isolation_level = 1 [default=SNAPSHOT];
-}
-
-// The response for BeginTransaction.
-message BeginTransactionResponse {
-
- // The transaction identifier (always present).
- optional bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The request for Rollback.
-message RollbackRequest {
-
- // The transaction identifier, returned by a call to
- // <code>beginTransaction</code>.
- required bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The response for Rollback.
-message RollbackResponse {
-// Empty
-}
-
-// The request for Commit.
-message CommitRequest {
-
- enum Mode {
- TRANSACTIONAL = 1;
- NON_TRANSACTIONAL = 2;
- }
-
- // The transaction identifier, returned by a call to
- // <code>beginTransaction</code>. Must be set when mode is TRANSACTIONAL.
- optional bytes /* serialized Transaction */ transaction = 1;
- // The mutation to perform. Optional.
- optional Mutation mutation = 2;
- // The type of commit to perform. Either TRANSACTIONAL or NON_TRANSACTIONAL.
- optional Mode mode = 5 [default=TRANSACTIONAL];
-}
-
-// The response for Commit.
-message CommitResponse {
-
- // The result of performing the mutation (if any).
- optional MutationResult mutation_result = 1;
-}
-
-// The request for AllocateIds.
-message AllocateIdsRequest {
-
- // A list of keys with incomplete key paths to allocate IDs for.
- // No key may be reserved/read-only.
- repeated Key key = 1;
-}
-
-// The response for AllocateIds.
-message AllocateIdsResponse {
-
- // The keys specified in the request (in the same order), each with
- // its key path completed with a newly allocated ID.
- repeated Key key = 1;
-}
-
-// Each rpc normalizes the partition IDs of the keys in its input entities,
-// and always returns entities with keys with normalized partition IDs.
-// (Note that applies to all entities, including entities in values.)
-service DatastoreService {
- // Look up some entities by key.
- rpc Lookup(LookupRequest) returns (LookupResponse) {
- };
- // Query for entities.
- rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
- };
- // Begin a new transaction.
- rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
- };
- // Commit a transaction, optionally creating, deleting or modifying some
- // entities.
- rpc Commit(CommitRequest) returns (CommitResponse) {
- };
- // Roll back a transaction.
- rpc Rollback(RollbackRequest) returns (RollbackResponse) {
- };
- // Allocate IDs for incomplete keys (useful for referencing an entity before
- // it is inserted).
- rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
- };
-}
diff --git a/lib/datastore/entity.js b/lib/datastore/entity.js
index 0ce930954040..e84e6232c298 100644
--- a/lib/datastore/entity.js
+++ b/lib/datastore/entity.js
@@ -21,23 +21,49 @@
'use strict';
+var createErrorClass = require('create-error-class');
var is = require('is');
-/** @const {object} Map for query operation -> operation protocol value. */
-var OP_TO_OPERATOR = {
- '=': 'EQUAL',
- '>': 'GREATER_THAN',
- '>=': 'GREATER_THAN_OR_EQUAL',
- '<': 'LESS_THAN',
- '<=': 'LESS_THAN_OR_EQUAL',
- HAS_ANCESTOR: 'HAS_ANCESTOR'
-};
-
-/** @const {object} Conversion map for query sign -> order protocol value. */
-var SIGN_TO_ORDER = {
- '-': 'DESCENDING',
- '+': 'ASCENDING'
-};
+var entity = module.exports;
+
+var InvalidKeyError = createErrorClass('InvalidKey', function(opts) {
+ var errorMessages = {
+ MISSING_KIND: 'A key should contain at least a kind.',
+ MISSING_ANCESTOR_ID: 'Ancestor keys require an id or name.'
+ };
+
+ this.message = errorMessages[opts.code];
+});
+
+/**
+ * Build a Datastore Double object.
+ *
+ * @constructor
+ * @param {number} value - The double value.
+ *
+ * @example
+ * var aDouble = new Double(7.3);
+ */
+function Double(value) {
+ this.value = value;
+}
+
+entity.Double = Double;
+
+/**
+ * Build a Datastore Int object.
+ *
+ * @constructor
+ * @param {number} value - The integer value.
+ *
+ * @example
+ * var anInt = new Int(7);
+ */
+function Int(value) {
+ this.value = value;
+}
+
+entity.Int = Int;
/**
* Build a Datastore Key object.
@@ -67,186 +93,234 @@ function Key(options) {
});
}
-module.exports.Key = Key;
+entity.Key = Key;
/**
- * Build a Datastore Int object.
+ * Convert a protobuf Value message to its native value.
*
- * @constructor
- * @param {number} val - The integer value.
+ * @param {object} valueProto - The protobuf Value message to convert.
+ * @return {*}
*
* @example
- * var anInt = new Int(7);
- */
-function Int(val) {
- this.val_ = val;
-}
-
-/**
- * Retrieve the Integer value.
+ * decodeValueProto({
+ * booleanValue: false
+ * });
+ * // false
*
- * @return {number}
+ * decodeValueProto({
+ * stringValue: 'Hi'
+ * });
+ * // 'Hi'
+ *
+ * decodeValueProto({
+ * blobValue: new Buffer('68656c6c6f')
+ * });
+ * // <Buffer 36 38 36 35 36 63 36 63 36 66>
*/
-Int.prototype.get = function() {
- return this.val_;
-};
+function decodeValueProto(valueProto) {
+ var valueType = valueProto.value_type;
+ var value = valueProto[valueType];
+
+ switch (valueType) {
+ case 'arrayValue': {
+ return value.values.map(entity.decodeValueProto);
+ }
+
+ case 'blobValue': {
+ return value.toBuffer();
+ }
+
+ case 'doubleValue': {
+ return parseFloat(value.toString(), 10);
+ }
+
+ case 'integerValue': {
+ return parseInt(value.toString(), 10);
+ }
+
+ case 'entityValue': {
+ return entity.entityFromEntityProto(value);
+ }
+
+ case 'keyValue': {
+ return entity.keyFromKeyProto(value);
+ }
+
+ case 'timestampValue': {
+ return new Date(parseInt(value.seconds, 10) * 1000);
+ }
+
+ default: {
+ return value;
+ }
+ }
+}
-module.exports.Int = Int;
+entity.decodeValueProto = decodeValueProto;
/**
- * Build a Datastore Double object.
+ * Convert any native value to a protobuf Value message object.
*
- * @constructor
- * @param {number} val - The double value.
+ * @param {*} value - Native value.
+ * @return {object}
*
* @example
- * var aDouble = new Double(7.3);
+ * encodeValue('Hi');
+ * // {
+ * // stringValue: 'Hi'
+ * // }
*/
-function Double(val) {
- this.val_ = val;
-}
+function encodeValue(value) {
+ var valueProto = {};
-/**
- * Retrieve the Double value.
- *
- * @return {number}
- */
-Double.prototype.get = function() {
- return this.val_;
-};
+ if (is.boolean(value)) {
+ valueProto.booleanValue = value;
+ return valueProto;
+ }
-module.exports.Double = Double;
+ if (is.number(value)) {
+ if (value % 1 === 0) {
+ value = new entity.Int(value);
+ } else {
+ value = new entity.Double(value);
+ }
+ }
+
+ if (value instanceof entity.Int) {
+ valueProto.integerValue = value.value;
+ return valueProto;
+ }
+
+ if (value instanceof entity.Double) {
+ valueProto.doubleValue = value.value;
+ return valueProto;
+ }
+
+ if (value instanceof Date) {
+ valueProto.timestampValue = {
+ seconds: value.getTime() / 1000,
+ nanos: value.getTime() * 1e6
+ };
+ return valueProto;
+ }
+
+ if (is.string(value)) {
+ valueProto.stringValue = value;
+ return valueProto;
+ }
+
+ if (value instanceof Buffer) {
+ valueProto.blobValue = value;
+ return valueProto;
+ }
+
+ if (is.array(value)) {
+ valueProto.arrayValue = {
+ values: value.map(entity.encodeValue)
+ };
+ return valueProto;
+ }
+
+ if (value instanceof entity.Key) {
+ valueProto.keyValue = entity.keyToKeyProto(value);
+ return valueProto;
+ }
+
+ if (is.object(value) && !is.empty(value)) {
+ var properties = Object.keys(value).map(function(key) {
+ return {
+ name: key,
+ value: entity.encodeValue(value[key])
+ };
+ });
+
+ valueProto.entityValue = {
+ properties: properties
+ };
+
+ return valueProto;
+ }
+
+ throw new Error('Unsupported field value, ' + value + ', was provided.');
+}
+
+entity.encodeValue = encodeValue;
/**
* Convert any entity protocol to a plain object.
*
* @todo Use registered metadata if provided.
*
- * @param {object} proto - The protocol entity object to convert.
+ * @param {object} entityProto - The protocol entity object to convert.
* @return {object}
*
* @example
- * var entity = entityFromEntityProto({
- * property: [
- * {
+ * entityFromEntityProto({
+ * properties: {
+ * map: {
* name: {
- * stringValue: 'Burcu Dogan'
+ * value: {
+ * value_type: 'stringValue',
+ * stringValue: 'Stephen'
+ * }
* }
* }
- * ]
+ * }
* });
- *
- * // entity:
* // {
- * // name: 'Burcu Dogan'
+ * // name: 'Stephen'
* // }
*/
-function entityFromEntityProto(proto) {
- var properties = proto.property || [];
- return Object.keys(properties).reduce(function(acc, key) {
- var property = properties[key];
- acc[property.name] = propertyToValue(property.value);
- return acc;
- }, {});
-}
-
-module.exports.entityFromEntityProto = entityFromEntityProto;
+function entityFromEntityProto(entityProto) {
+ var entityObject = {};
-/**
- * Convert a key protocol object to a Key object.
- *
- * @param {object} proto - The key protocol object to convert.
- * @return {Key}
- *
- * @example
- * var key = keyFromKeyProto({
- * partitionId: {
- * datasetId: 'project-id',
- * namespace: ''
- * },
- * path: [
- * {
- * kind: 'Kind',
- * id: '4790047639339008'
- * }
- * ]
- * });
- */
-function keyFromKeyProto(proto) {
- var keyOptions = {
- path: []
- };
+ var properties = entityProto.properties || {};
- if (proto.partition_id && proto.partition_id.namespace) {
- keyOptions.namespace = proto.partition_id.namespace;
+ for (var property in properties.map) {
+ var value = properties.map[property].value;
+ entityObject[property] = entity.decodeValueProto(value);
}
- proto.path_element.forEach(function(path, index) {
- var id = Number(path.id) || path.name;
- keyOptions.path.push(path.kind);
- if (id) {
- keyOptions.path.push(id);
- } else if (index < proto.path_element.length - 1) {
- throw new Error('Invalid key. Ancestor keys require an id or name.');
- }
- });
-
- return new Key(keyOptions);
+ return entityObject;
}
-module.exports.keyFromKeyProto = keyFromKeyProto;
+entity.entityFromEntityProto = entityFromEntityProto;
/**
- * Convert a Key object to a key protocol object.
+ * Convert an entity object to an entity protocol object.
*
- * @param {Key} key - The Key object to convert.
+ * @param {object} entityObject - The entity object to convert.
* @return {object}
*
* @example
- * var keyProto = keyToKeyProto(new Key(['Company', 1]));
- *
- * // keyProto:
+ * entityToEntityProto({
+ * name: 'Burcu',
+ * legit: true
+ * });
* // {
- * // path: [
- * // {
- * // kind: 'Company',
- * // id: 1
+ * // key: null,
+ * // properties: {
+ * // name: {
+ * // stringValue: 'Burcu'
+ * // },
+ * // legit: {
+ * // booleanValue: true
* // }
- * // ]
+ * // }
* // }
*/
-function keyToKeyProto(key) {
- var keyPath = key.path;
- if (keyPath.length === 0) {
- throw new Error('A key should contain at least a kind.');
- }
- var path = [];
- for (var i = 0; i < keyPath.length; i += 2) {
- var p = { kind: keyPath[i] };
- var val = keyPath[i + 1];
- if (val) {
- if (is.number(val)) {
- p.id = val;
- } else {
- p.name = val;
- }
- } else if (i < keyPath.length - 2) { // i is second last path item
- throw new Error('Invalid key. Ancestor keys require an id or name.');
- }
- path.push(p);
- }
- var proto = {
- path_element: path
+function entityToEntityProto(entityObject) {
+ return {
+ key: null,
+
+ properties: Object.keys(entityObject).reduce(function(properties, key) {
+ properties[key] = entity.encodeValue(entityObject[key]);
+ return properties;
+ }, {})
};
- if (key.namespace) {
- proto.partition_id = {
- namespace: key.namespace
- };
- }
- return proto;
}
-module.exports.keyToKeyProto = keyToKeyProto;
+entity.entityToEntityProto = entityToEntityProto;
/**
* Convert an API response array to a qualified Key and data object.
@@ -259,8 +333,6 @@ module.exports.keyToKeyProto = keyToKeyProto;
* @example
* makeReq('runQuery', {}, function(err, response) {
* var entityObjects = formatArray(response.batch.entityResults);
- *
- * // entityObjects:
* // {
* // key: {},
* // data: {
@@ -273,13 +345,13 @@ module.exports.keyToKeyProto = keyToKeyProto;
function formatArray(results) {
return results.map(function(result) {
return {
- key: keyFromKeyProto(result.entity.key),
- data: entityFromEntityProto(result.entity)
+ key: entity.keyFromKeyProto(result.entity.key),
+ data: entity.entityFromEntityProto(result.entity)
};
});
}
-module.exports.formatArray = formatArray;
+entity.formatArray = formatArray;
/**
* Check if a key is complete.
@@ -291,196 +363,122 @@ module.exports.formatArray = formatArray;
* isKeyComplete(new Key(['Company', 'Google'])); // true
* isKeyComplete(new Key('Company')); // false
*/
-module.exports.isKeyComplete = function(key) {
- var proto = keyToKeyProto(key);
- for (var i = 0; i < proto.path_element.length; i++) {
- if (!proto.path_element[i].kind) {
- return false;
- }
- if (!proto.path_element[i].id && !proto.path_element[i].name) {
- return false;
- }
- }
- return true;
-};
+function isKeyComplete(key) {
+ var lastPathElement = entity.keyToKeyProto(key).path.pop();
+ return !!(lastPathElement.id || lastPathElement.name);
+}
+
+entity.isKeyComplete = isKeyComplete;
/**
- * Convert a protocol property to it's native value.
- *
- * @todo Do we need uint64s and keep Long.js support?
+ * Convert a key protocol object to a Key object.
*
- * @param {object} property - The property object to convert.
- * @return {*}
+ * @param {object} keyProto - The key protocol object to convert.
+ * @return {Key}
*
* @example
- * propertyToValue({
- * boolean_value: false
- * });
- * // false
- *
- * propertyToValue({
- * string_value: 'Hi'
- * });
- * // 'Hi'
- *
- * propertyToValue({
- * blob_value: new Buffer('68656c6c6f')
+ * var key = keyFromKeyProto({
+ * partitionId: {
+ * projectId: 'project-id',
+ * namespaceId: ''
+ * },
+ * path: [
+ * {
+ * kind: 'Kind',
+ * id: '4790047639339008'
+ * }
+ * ]
* });
- * //
*/
-function propertyToValue(property) {
- if (exists(property.integer_value)) {
- return parseInt(property.integer_value.toString(), 10);
- }
- if (exists(property.double_value)) {
- return property.double_value;
- }
- if (exists(property.string_value)) {
- return property.string_value;
- }
- if (exists(property.blob_value)) {
- return property.blob_value.toBuffer();
- }
- if (exists(property.timestamp_microseconds_value)) {
- var microSecs = parseInt(
- property.timestamp_microseconds_value.toString(), 10);
- return new Date(microSecs / 1000);
- }
- if (exists(property.key_value)) {
- return keyFromKeyProto(property.key_value);
- }
- if (exists(property.entity_value)) {
- return entityFromEntityProto(property.entity_value);
- }
- if (exists(property.boolean_value)) {
- return property.boolean_value;
+function keyFromKeyProto(keyProto) {
+ var keyOptions = {
+ path: []
+ };
+
+ if (keyProto.partitionId && keyProto.partitionId.namespaceId) {
+ keyOptions.namespaceId = keyProto.partitionId.namespaceId;
}
- if (exists(property.list_value)) {
- var list = [];
- for (var i = 0; i < property.list_value.length; i++) {
- list.push(propertyToValue(property.list_value[i]));
+
+ keyProto.path.forEach(function(path, index) {
+ var id = path.name || Number(path.id);
+
+ keyOptions.path.push(path.kind);
+
+ if (id) {
+ keyOptions.path.push(id);
+ } else if (index < keyProto.path.length - 1) {
+ throw new InvalidKeyError({
+ code: 'MISSING_ANCESTOR_ID'
+ });
}
- return list;
- }
+ });
+
+ return new entity.Key(keyOptions);
}
-module.exports.propertyToValue = propertyToValue;
+entity.keyFromKeyProto = keyFromKeyProto;
/**
- * Convert any native value to a property object.
+ * Convert a Key object to a key protocol object.
*
- * @param {*} v - Original value.
+ * @param {Key} key - The Key object to convert.
* @return {object}
*
* @example
- * valueToProperty('Hi');
+ * var keyProto = keyToKeyProto(new Key(['Company', 1]));
* // {
- * // stringValue: 'Hi'
+ * // path: [
+ * // {
+ * // kind: 'Company',
+ * // id: 1
+ * // }
+ * // ]
* // }
*/
-function valueToProperty(v) {
- var p = {};
- if (v instanceof Boolean || typeof v === 'boolean') {
- p.boolean_value = v;
- return p;
- }
- if (v instanceof Int) {
- p.integer_value = v.get();
- return p;
- }
- if (v instanceof Double) {
- p.double_value = v.get();
- return p;
- }
- if (v instanceof Number || typeof v === 'number') {
- if (v % 1 === 0) {
- p.integer_value = v;
- } else {
- p.double_value = v;
- }
- return p;
- }
- if (v instanceof Date) {
- p.timestamp_microseconds_value = v.getTime() * 1000;
- return p;
- }
- if (v instanceof String || typeof v === 'string') {
- p.string_value = v;
- return p;
- }
- if (v instanceof Buffer) {
- p.blob_value = v;
- return p;
- }
- if (Array.isArray(v)) {
- p.list_value = v.map(function(item) {
- return valueToProperty(item);
+function keyToKeyProto(key) {
+ if (key.path.length === 0) {
+ throw new InvalidKeyError({
+ code: 'MISSING_KIND'
});
- return p;
}
- if (v instanceof Key) {
- p.key_value = keyToKeyProto(v);
- return p;
+
+ var keyProto = {
+ path: []
+ };
+
+ if (key.namespace) {
+ keyProto.partitionId = {
+ namespaceId: key.namespace
+ };
}
- if (v instanceof Object && Object.keys(v).length > 0) {
- var property = [];
- Object.keys(v).forEach(function(k) {
- property.push({
- name: k,
- value: valueToProperty(v[k])
+
+ for (var i = 0; i < key.path.length; i += 2) {
+ var pathElement = {
+ kind: key.path[i]
+ };
+
+ var value = key.path[i + 1];
+
+ if (value) {
+ if (is.number(value)) {
+ pathElement.id = value;
+ } else {
+ pathElement.name = value;
+ }
+ } else if (i < key.path.length - 2) {
+ // This isn't just an incomplete key. An ancestor key is incomplete.
+ throw new InvalidKeyError({
+ code: 'MISSING_ANCESTOR_ID'
});
- });
- p.entity_value = { property: property };
- p.indexed = false;
- return p;
- }
- throw new Error('Unsupported field value, ' + v + ', is provided.');
-}
+ }
-module.exports.valueToProperty = valueToProperty;
+ keyProto.path.push(pathElement);
+ }
-/**
- * Convert an entity object to an entity protocol object.
- *
- * @param {object} entity - The entity object to convert.
- * @return {object}
- *
- * @example
- * entityToEntityProto({
- * name: 'Burcu',
- * legit: true
- * });
- * // {
- * // key: null,
- * // property: [
- * // {
- * // name: 'name',
- * // value: {
- * // string_value: 'Burcu'
- * // }
- * // },
- * // {
- * // name: 'legit',
- * // value: {
- * // boolean_value: true
- * // }
- * // }
- * // }
- * // }
- */
-function entityToEntityProto(entity) {
- return {
- key: null,
- property: Object.keys(entity).map(function(key) {
- return {
- name: key,
- value: valueToProperty(entity[key])
- };
- })
- };
+ return keyProto;
}
-module.exports.entityToEntityProto = entityToEntityProto;
+entity.keyToKeyProto = keyToKeyProto;
/**
* Convert a query object to a query protocol object.
@@ -516,70 +514,100 @@ module.exports.entityToEntityProto = entityToEntityProto;
* // groupBy: []
* // }
*/
-function queryToQueryProto(q) {
- var query = {};
- query.projection = q.selectVal.map(function(v) {
- return { property: { name: v } };
- });
- query.kind = q.kinds.map(function(k) {
- return { name: k };
- });
- // filters
- if (q.filters.length > 0) {
- var filters = q.filters.map(function(f) {
- var val = {};
- if (f.name === '__key__') {
- val.key_value = keyToKeyProto(f.val);
- } else {
- val = valueToProperty(f.val);
- }
- var property = {
- property: { name: f.name },
- operator: OP_TO_OPERATOR[f.op],
- value: val
+function queryToQueryProto(query) {
+ var OP_TO_OPERATOR = {
+ '=': 'EQUAL',
+ '>': 'GREATER_THAN',
+ '>=': 'GREATER_THAN_OR_EQUAL',
+ '<': 'LESS_THAN',
+ '<=': 'LESS_THAN_OR_EQUAL',
+ HAS_ANCESTOR: 'HAS_ANCESTOR'
+ };
+
+ var SIGN_TO_ORDER = {
+ '-': 'DESCENDING',
+ '+': 'ASCENDING'
+ };
+
+ var queryProto = {
+ distinctOn: query.groupByVal.map(function(groupBy) {
+ return {
+ name: groupBy
};
- return { property_filter: property };
- });
- query.filter = {
- composite_filter: { filter: filters, operator: 'AND' }
- };
+ }),
+
+ kind: query.kinds.map(function(kind) {
+ return {
+ name: kind
+ };
+ }),
+
+ order: query.orders.map(function(order) {
+ return {
+ property: {
+ name: order.name
+ },
+ direction: SIGN_TO_ORDER[order.sign]
+ };
+ }),
+
+ projection: query.selectVal.map(function(select) {
+ return {
+ property: {
+ name: select
+ }
+ };
+ })
+ };
+
+ if (query.endVal) {
+ queryProto.endCursor = new Buffer(query.endVal, 'base64');
}
- query.order = q.orders.map(function(o) {
- return {
- property: { name: o.name },
- direction: SIGN_TO_ORDER[o.sign]
+
+ if (query.limitVal > 0) {
+ queryProto.limit = {
+ value: query.limitVal
};
- });
- query.group_by = q.groupByVal.map(function(g) {
- return { name: g };
- });
- // pagination
- if (q.startVal) {
- query.start_cursor = new Buffer(q.startVal, 'base64');
- }
- if (q.endVal) {
- query.end_cursor = new Buffer(q.endVal, 'base64');
}
- if (q.offsetVal > 0) {
- query.offset = q.offsetVal;
+
+ if (query.offsetVal > 0) {
+ queryProto.offset = query.offsetVal;
}
- if (q.limitVal > 0) {
- query.limit = q.limitVal;
+
+ if (query.startVal) {
+ queryProto.startCursor = new Buffer(query.startVal, 'base64');
}
- return query;
-}
-module.exports.queryToQueryProto = queryToQueryProto;
+ if (query.filters.length > 0) {
+ var filters = query.filters.map(function(filter) {
+ var value = {};
-/**
- * Does a value exist?
- *
- * @todo If protobufjs had hasFieldname support, we wouldn't need a utility.
- * Later address it on Protobuf.js.
- *
- * @param {*} value - Value.
- * @return {boolean}
- */
-function exists(value) {
- return (value !== null && value !== undefined);
+ if (filter.name === '__key__') {
+ value.keyValue = entity.keyToKeyProto(filter.val);
+ } else {
+ value = entity.encodeValue(filter.val);
+ }
+
+ return {
+ propertyFilter: {
+ property: {
+ name: filter.name
+ },
+ op: OP_TO_OPERATOR[filter.op],
+ value: value
+ }
+ };
+ });
+
+ queryProto.filter = {
+ compositeFilter: {
+ filters: filters,
+ op: 'AND'
+ }
+ };
+ }
+
+ return queryProto;
}
+
+entity.queryToQueryProto = queryToQueryProto;
diff --git a/lib/datastore/index.js b/lib/datastore/index.js
index b94789ab246f..2f7fc877f236 100644
--- a/lib/datastore/index.js
+++ b/lib/datastore/index.js
@@ -20,12 +20,34 @@
'use strict';
+var arrify = require('arrify');
+var is = require('is');
+var nodeutil = require('util');
+
+/**
+ * @type {module:datastore/request}
+ * @private
+ */
+var DatastoreRequest = require('./request.js');
+
/**
* @type {module:datastore/entity}
* @private
*/
var entity = require('./entity');
+/**
+ * @type {module:datastore/query}
+ * @private
+ */
+var Query = require('./query.js');
+
+/**
+ * @type {module:datastore/transaction}
+ * @private
+ */
+var Transaction = require('./transaction.js');
+
/**
* @type {module:common/util}
* @private
@@ -33,37 +55,27 @@ var entity = require('./entity');
var util = require('../common/util.js');
/**
- * @type {module:datastore/dataset}
+ * Scopes for Google Datastore access.
+ * @const {array} SCOPES
* @private
*/
-var Dataset = require('./dataset');
+var SCOPES = [
+ 'https://www.googleapis.com/auth/datastore'
+];
-/*! Developer Documentation
- *
- * Invoking the Datastore class allows you to provide configuration up-front.
- * This configuration will be used for future invocations of the returned
- * `dataset` method.
- *
- * @example
- * var datastore = gcloud.datastore;
- *
- * // datastore.dataset();
- * //
- * // is equal to...
- * //
- * // datastore.dataset({
- * // projectId: 'grape-spaceship-123',
- * // keyFilename: '/path/to/keyfile.json'
- * // });
- */
/**
- * The example below will demonstrate the different usage patterns your app may
- * need to support to retrieve a datastore object.
+ * Interact with the
+ * [Google Cloud Datastore](https://developers.google.com/datastore/).
*
- * @alias module:datastore
* @constructor
+ * @alias module:datastore
+ * @mixes module:datastore/request
*
- * @param {object} options - [Configuration object](#/docs/?method=gcloud).
+ * @param {object=} options - [Configuration object](#/docs/?method=gcloud).
+ * @param {string=} options.apiEndpoint - Override the default API endpoint used
+ * to reach Datastore. This is useful for connecting to your local Datastore
+ * server (usually "http://localhost:8080").
+ * @param {string} options.namespace - Namespace to isolate transactions to.
*
* @example
* var gcloud = require('gcloud')({
@@ -71,50 +83,55 @@ var Dataset = require('./dataset');
* keyFilename: '/path/to/keyfile.json'
* });
*
- * var datastore = gcloud.datastore;
+ * var datastore = gcloud.datastore();
+ *
+ * //-
+ * // Connect to your local Datastore server.
+ * //-
+ * var datastore = gcloud.datastore({
+ * apiEndpoint: 'http://localhost:8080'
+ * });
+ *
+ * //-
+ * // The `process.env.DATASTORE_HOST` environment variable is also recognized.
+ * // If set, you may omit the `apiEndpoint` option.
+ * //-
*/
function Datastore(options) {
- this.config = options || {};
+ if (!(this instanceof Datastore)) {
+ options = util.normalizeArguments(this, options);
+ return new Datastore(options);
+ }
+
+ this.makeAuthenticatedRequest_ = util.makeAuthenticatedRequestFactory({
+ customEndpoint: typeof options.apiEndpoint !== 'undefined',
+ credentials: options.credentials,
+ keyFile: options.keyFilename,
+ scopes: SCOPES,
+ email: options.email
+ });
+
+ this.apiEndpoint = Datastore.determineApiEndpoint_(options);
+ this.namespace = options.namespace;
+ this.projectId = options.projectId;
}
-/*! Developer Documentation
- *
- * Use this static method to create a dataset without any pre-configured
- * options.
- *
- * @example
- * var datastore = gcloud.datastore;
- *
- * // Create a Dataset object.
- * var dataset = datastore.dataset({
- * projectId: 'grape-spaceship-123',
- * keyFilename: '/path/to/keyfile.json'
- * });
- */
-Datastore.dataset = Dataset;
+nodeutil.inherits(Datastore, DatastoreRequest);
-/*! Developer Documentation
+/**
+ * Helper function to get a Datastore Double object.
*
- * Create a dataset using the instance method when you want to use your
- * pre-configured options from the Datastore instance.
+ * @param {number} value - The double value.
+ * @return {object}
*
- * @param {object=} options - Configuration object.
- * @return {module:datastore/dataset}
- */
-/**
- * Create a Dataset object to reference an existing dataset.
+ * @example
+ * var gcloud = require('gcloud');
*
- * @param {object=} options - [Configuration object](#/docs/?method=gcloud).
- * @param {string=} options.apiEndpoint - Override the default API endpoint used
- * to reach Datastore. This is useful for connecting to your local Datastore
- * server (usually "http://localhost:8080").
- * @param {string} options.namespace - Namespace to isolate transactions to.
- * @return {module:datastore/dataset}
+ * // Create a Double.
+ * var threeDouble = gcloud.datastore.double(3.0);
*/
-Datastore.prototype.dataset = function(options) {
- options = options || {};
- // Mix in global config data to the provided options.
- return new Dataset(util.extendGlobalConfig(this.config, options));
+Datastore.double = function(value) {
+ return new entity.Double(value);
};
/**
@@ -134,19 +151,149 @@ Datastore.int = function(value) {
};
/**
- * Helper function to get a Datastore Double object.
+ * Determine the appropriate endpoint to use for API requests. If not explicitly
+ * defined, check for the "DATASTORE_HOST" environment variable, used to connect
+ * to a local Datastore server.
*
- * @param {number} value - The double value.
- * @return {object}
+ * @private
+ *
+ * @param {object} options - Configuration object.
+ * @param {string=} options.apiEndpoint - Custom API endpoint.
+ */
+Datastore.determineApiEndpoint_ = function(options) {
+ var apiEndpoint = 'https://datastore.googleapis.com';
+ var trailingSlashes = new RegExp('/*$');
+
+ if (options.apiEndpoint) {
+ apiEndpoint = options.apiEndpoint;
+ } else if (process.env.DATASTORE_HOST) {
+ apiEndpoint = process.env.DATASTORE_HOST;
+ }
+
+ if (apiEndpoint.indexOf('http') !== 0) {
+ apiEndpoint = 'http://' + apiEndpoint;
+ }
+
+ return apiEndpoint.replace(trailingSlashes, '');
+};
+
+/**
+ * Create a query for the specified kind.
+ *
+ * @resource [Datastore Queries]{@link http://goo.gl/Cag0r6}
+ *
+ * @see {module:datastore/query}
+ *
+ * @param {string=} namespace - Namespace.
+ * @param {string} kind - The kind to query.
+ * @return {module:datastore/query}
+ */
+Datastore.prototype.createQuery = function(namespace, kind) {
+ if (arguments.length === 1) {
+ kind = arrify(namespace);
+ namespace = this.namespace;
+ }
+
+ return new Query(namespace, arrify(kind));
+};
+
+/**
+ * Helper to create a Key object, scoped to the instance's namespace by default.
+ *
+ * You may also specify a configuration object to define a namespace and path.
+ *
+ * @param {...*=} options - Key path. To specify or override a namespace,
+ * you must use an object here to explicitly state it.
+ * @param {object=} options - Configuration object.
+ * @param {...*=} options.path - Key path.
+ * @param {string=} options.namespace - Optional namespace.
+ * @return {Key} A newly created Key from the options given.
*
* @example
- * var gcloud = require('gcloud');
+ * //-
+ * // Create an incomplete key with a kind value of `Company`.
+ * //-
+ * var key = datastore.key('Company');
*
- * // Create a Double.
- * var threeDouble = gcloud.datastore.double(3.0);
+ * //-
+ * // Create a complete key with a kind value of `Company` and id `123`.
+ * //-
+ * var key = datastore.key(['Company', 123]);
+ *
+ * //-
+ * // Create a complete key with a kind value of `Company` and name `Google`.
+ * // Note: `id` is used for numeric identifiers and `name` is used otherwise.
+ * //-
+ * var key = datastore.key(['Company', 'Google']);
+ *
+ * //-
+ * // Create a complete key from a provided namespace and path.
+ * //-
+ * var key = datastore.key({
+ * namespace: 'My-NS',
+ * path: ['Company', 123]
+ * });
*/
-Datastore.double = function(value) {
- return new entity.Double(value);
+Datastore.prototype.key = function(options) {
+ options = is.object(options) ? options : {
+ namespace: this.namespace,
+ path: arrify(options)
+ };
+
+ return new entity.Key(options);
+};
+
+/**
+ * Run a function in the context of a new transaction. Transactions allow you to
+ * perform multiple operations, committing your changes atomically. When you are
+ * finished making your changes within the transaction, run the done() function
+ * provided in the callback function to commit your changes. See an example
+ * below for more information.
+ *
+ * @todo update resource link below.
+ * @resource [Datasets: beginTransaction API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/beginTransaction}
+ *
+ * @param {function} fn - The function to run in the context of a transaction.
+ * @param {module:datastore/transaction} fn.transaction - The Transaction.
+ * @param {function} fn.done - Function used to commit changes.
+ * @param {function} callback - The callback function.
+ * @param {?error} callback.err - An error returned while making this request.
+ *
+ * @example
+ * datastore.runInTransaction(function(transaction, done) {
+ * // From the `transaction` object, execute datastore methods as usual.
+ * transaction.get(datastore.key(['Company', 123]), function(err, entity) {
+ * if (err) {
+ * transaction.rollback(done);
+ * return;
+ * }
+ *
+ * // Call `done` when you're ready to commit your changes.
+ * done();
+ * });
+ * }, function(err, apiResponse) {});
+ */
+Datastore.prototype.runInTransaction = function(fn, callback) {
+ var newTransaction = this.createTransaction_();
+
+ newTransaction.begin_(function(err, resp) {
+ if (err) {
+ callback(err, resp);
+ return;
+ }
+
+ fn(newTransaction, newTransaction.commit_.bind(newTransaction, callback));
+ });
+};
+
+/**
+ * Create a new Transaction object.
+ *
+ * @return {module:datastore/transaction}
+ * @private
+ */
+Datastore.prototype.createTransaction_ = function() {
+ return new Transaction(this, this.projectId);
};
module.exports = Datastore;
diff --git a/lib/datastore/pb.js b/lib/datastore/pb.js
deleted file mode 100644
index 68ae909f8683..000000000000
--- a/lib/datastore/pb.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @private
- * @module datastore/pb
- */
-
-'use strict';
-
-var path = require('path');
-var protobufjs = require('protobufjs');
-
-/** @const {string} Path to the proto file. */
-var PROTO_FILE = path.join(__dirname, 'datastore_v1.proto');
-
-/**
- * protobuf.
- *
- * @type {object}
- */
-module.exports = protobufjs.loadProtoFile(PROTO_FILE).build().pb;
diff --git a/lib/datastore/proto.js b/lib/datastore/proto.js
new file mode 100644
index 000000000000..31206a2cf1ba
--- /dev/null
+++ b/lib/datastore/proto.js
@@ -0,0 +1,1376 @@
+// GENERATED BY `pbjs` CLI
+
+module.exports = require("protobufjs").newBuilder({})['import']({
+ "package": "google",
+ "messages": [
+ {
+ "name": "api",
+ "fields": [],
+ "options": {
+ "java_multiple_files": true,
+ "java_outer_classname": "AnnotationsProto",
+ "java_package": "com.google.api"
+ },
+ "messages": [
+ {
+ "name": "HttpRule",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "get",
+ "id": 2,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "put",
+ "id": 3,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "post",
+ "id": 4,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "delete",
+ "id": 5,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "patch",
+ "id": 6,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "CustomHttpPattern",
+ "name": "custom",
+ "id": 8,
+ "oneof": "pattern"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "body",
+ "id": 7
+ },
+ {
+ "rule": "repeated",
+ "type": "HttpRule",
+ "name": "additionalBindings",
+ "id": 11
+ }
+ ],
+ "oneofs": {
+ "pattern": [
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 8
+ ]
+ }
+ },
+ {
+ "name": "CustomHttpPattern",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "kind",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "path",
+ "id": 2
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "protobuf",
+ "fields": [],
+ "options": {
+ "java_generate_equals_and_hash": true,
+ "java_multiple_files": true,
+ "java_outer_classname": "WrappersProto",
+ "java_package": "com.google.protobuf",
+ "csharp_namespace": "Google.Protobuf.WellKnownTypes",
+ "objc_class_prefix": "GPB"
+ },
+ "messages": [
+ {
+ "name": "Struct",
+ "fields": [
+ {
+ "rule": "map",
+ "type": "Value",
+ "keytype": "string",
+ "name": "fields",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "NullValue",
+ "name": "nullValue",
+ "id": 1,
+ "oneof": "kind"
+ },
+ {
+ "rule": "optional",
+ "type": "double",
+ "name": "numberValue",
+ "id": 2,
+ "oneof": "kind"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "stringValue",
+ "id": 3,
+ "oneof": "kind"
+ },
+ {
+ "rule": "optional",
+ "type": "bool",
+ "name": "boolValue",
+ "id": 4,
+ "oneof": "kind"
+ },
+ {
+ "rule": "optional",
+ "type": "Struct",
+ "name": "structValue",
+ "id": 5,
+ "oneof": "kind"
+ },
+ {
+ "rule": "optional",
+ "type": "ListValue",
+ "name": "listValue",
+ "id": 6,
+ "oneof": "kind"
+ }
+ ],
+ "oneofs": {
+ "kind": [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6
+ ]
+ }
+ },
+ {
+ "name": "ListValue",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "Value",
+ "name": "values",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Timestamp",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "int64",
+ "name": "seconds",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "nanos",
+ "id": 2
+ }
+ ]
+ },
+ {
+ "name": "DoubleValue",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "double",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "FloatValue",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "float",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Int64Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "int64",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "UInt64Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "uint64",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Int32Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "UInt32Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "uint32",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "BoolValue",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "bool",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "StringValue",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "BytesValue",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "value",
+ "id": 1
+ }
+ ]
+ }
+ ],
+ "enums": [
+ {
+ "name": "NullValue",
+ "values": [
+ {
+ "name": "NULL_VALUE",
+ "id": 0
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "type",
+ "fields": [],
+ "options": {
+ "java_generate_equals_and_hash": true,
+ "java_multiple_files": true,
+ "java_outer_classname": "LatLngProto",
+ "java_package": "com.google.type"
+ },
+ "messages": [
+ {
+ "name": "LatLng",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "double",
+ "name": "latitude",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "double",
+ "name": "longitude",
+ "id": 2
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "datastore",
+ "fields": [],
+ "messages": [
+ {
+ "name": "v1beta3",
+ "fields": [],
+ "options": {
+ "java_multiple_files": true,
+ "java_outer_classname": "DatastoreProto",
+ "java_package": "com.google.datastore.v1beta3"
+ },
+ "messages": [
+ {
+ "name": "PartitionId",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 2
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "namespaceId",
+ "id": 4
+ }
+ ]
+ },
+ {
+ "name": "Key",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "PartitionId",
+ "name": "partitionId",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "PathElement",
+ "name": "path",
+ "id": 2
+ }
+ ],
+ "messages": [
+ {
+ "name": "PathElement",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "kind",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "int64",
+ "name": "id",
+ "id": 2,
+ "oneof": "id_type"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "name",
+ "id": 3,
+ "oneof": "id_type"
+ }
+ ],
+ "oneofs": {
+ "id_type": [
+ 2,
+ 3
+ ]
+ }
+ }
+ ]
+ },
+ {
+ "name": "ArrayValue",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "Value",
+ "name": "values",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Value",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "protobuf.NullValue",
+ "name": "nullValue",
+ "id": 11,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "bool",
+ "name": "booleanValue",
+ "id": 1,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "int64",
+ "name": "integerValue",
+ "id": 2,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "double",
+ "name": "doubleValue",
+ "id": 3,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "protobuf.Timestamp",
+ "name": "timestampValue",
+ "id": 10,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "Key",
+ "name": "keyValue",
+ "id": 5,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "stringValue",
+ "id": 17,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "blobValue",
+ "id": 18,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "type.LatLng",
+ "name": "geoPointValue",
+ "id": 8,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "Entity",
+ "name": "entityValue",
+ "id": 6,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "ArrayValue",
+ "name": "arrayValue",
+ "id": 9,
+ "oneof": "value_type"
+ },
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "meaning",
+ "id": 14
+ },
+ {
+ "rule": "optional",
+ "type": "bool",
+ "name": "excludeFromIndexes",
+ "id": 19
+ }
+ ],
+ "oneofs": {
+ "value_type": [
+ 11,
+ 1,
+ 2,
+ 3,
+ 10,
+ 5,
+ 17,
+ 18,
+ 8,
+ 6,
+ 9
+ ]
+ }
+ },
+ {
+ "name": "Entity",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Key",
+ "name": "key",
+ "id": 1
+ },
+ {
+ "rule": "map",
+ "type": "Value",
+ "keytype": "string",
+ "name": "properties",
+ "id": 3
+ }
+ ]
+ },
+ {
+ "name": "EntityResult",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Entity",
+ "name": "entity",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "cursor",
+ "id": 3
+ }
+ ],
+ "enums": [
+ {
+ "name": "ResultType",
+ "values": [
+ {
+ "name": "RESULT_TYPE_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "FULL",
+ "id": 1
+ },
+ {
+ "name": "PROJECTION",
+ "id": 2
+ },
+ {
+ "name": "KEY_ONLY",
+ "id": 3
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "Query",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "Projection",
+ "name": "projection",
+ "id": 2
+ },
+ {
+ "rule": "repeated",
+ "type": "KindExpression",
+ "name": "kind",
+ "id": 3
+ },
+ {
+ "rule": "optional",
+ "type": "Filter",
+ "name": "filter",
+ "id": 4
+ },
+ {
+ "rule": "repeated",
+ "type": "PropertyOrder",
+ "name": "order",
+ "id": 5
+ },
+ {
+ "rule": "repeated",
+ "type": "PropertyReference",
+ "name": "distinctOn",
+ "id": 6
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "startCursor",
+ "id": 7
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "endCursor",
+ "id": 8
+ },
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "offset",
+ "id": 10
+ },
+ {
+ "rule": "optional",
+ "type": "protobuf.Int32Value",
+ "name": "limit",
+ "id": 12
+ }
+ ]
+ },
+ {
+ "name": "KindExpression",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "name",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "PropertyReference",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "name",
+ "id": 2
+ }
+ ]
+ },
+ {
+ "name": "Projection",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "PropertyReference",
+ "name": "property",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "PropertyOrder",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "PropertyReference",
+ "name": "property",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "Direction",
+ "name": "direction",
+ "id": 2
+ }
+ ],
+ "enums": [
+ {
+ "name": "Direction",
+ "values": [
+ {
+ "name": "DIRECTION_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "ASCENDING",
+ "id": 1
+ },
+ {
+ "name": "DESCENDING",
+ "id": 2
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "Filter",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "CompositeFilter",
+ "name": "compositeFilter",
+ "id": 1,
+ "oneof": "filter_type"
+ },
+ {
+ "rule": "optional",
+ "type": "PropertyFilter",
+ "name": "propertyFilter",
+ "id": 2,
+ "oneof": "filter_type"
+ }
+ ],
+ "oneofs": {
+ "filter_type": [
+ 1,
+ 2
+ ]
+ }
+ },
+ {
+ "name": "CompositeFilter",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Operator",
+ "name": "op",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "Filter",
+ "name": "filters",
+ "id": 2
+ }
+ ],
+ "enums": [
+ {
+ "name": "Operator",
+ "values": [
+ {
+ "name": "OPERATOR_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "AND",
+ "id": 1
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "PropertyFilter",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "PropertyReference",
+ "name": "property",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "Operator",
+ "name": "op",
+ "id": 2
+ },
+ {
+ "rule": "optional",
+ "type": "Value",
+ "name": "value",
+ "id": 3
+ }
+ ],
+ "enums": [
+ {
+ "name": "Operator",
+ "values": [
+ {
+ "name": "OPERATOR_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "LESS_THAN",
+ "id": 1
+ },
+ {
+ "name": "LESS_THAN_OR_EQUAL",
+ "id": 2
+ },
+ {
+ "name": "GREATER_THAN",
+ "id": 3
+ },
+ {
+ "name": "GREATER_THAN_OR_EQUAL",
+ "id": 4
+ },
+ {
+ "name": "EQUAL",
+ "id": 5
+ },
+ {
+ "name": "HAS_ANCESTOR",
+ "id": 11
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "GqlQuery",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "queryString",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "bool",
+ "name": "allowLiterals",
+ "id": 2
+ },
+ {
+ "rule": "map",
+ "type": "GqlQueryParameter",
+ "keytype": "string",
+ "name": "namedBindings",
+ "id": 5
+ },
+ {
+ "rule": "repeated",
+ "type": "GqlQueryParameter",
+ "name": "positionalBindings",
+ "id": 4
+ }
+ ]
+ },
+ {
+ "name": "GqlQueryParameter",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Value",
+ "name": "value",
+ "id": 2,
+ "oneof": "parameter_type"
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "cursor",
+ "id": 3,
+ "oneof": "parameter_type"
+ }
+ ],
+ "oneofs": {
+ "parameter_type": [
+ 2,
+ 3
+ ]
+ }
+ },
+ {
+ "name": "QueryResultBatch",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "skippedResults",
+ "id": 6
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "skippedCursor",
+ "id": 3
+ },
+ {
+ "rule": "optional",
+ "type": "EntityResult.ResultType",
+ "name": "entityResultType",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "EntityResult",
+ "name": "entityResults",
+ "id": 2
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "endCursor",
+ "id": 4
+ },
+ {
+ "rule": "optional",
+ "type": "MoreResultsType",
+ "name": "moreResults",
+ "id": 5
+ }
+ ],
+ "enums": [
+ {
+ "name": "MoreResultsType",
+ "values": [
+ {
+ "name": "MORE_RESULTS_TYPE_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "NOT_FINISHED",
+ "id": 1
+ },
+ {
+ "name": "MORE_RESULTS_AFTER_LIMIT",
+ "id": 2
+ },
+ {
+ "name": "MORE_RESULTS_AFTER_CURSOR",
+ "id": 4
+ },
+ {
+ "name": "NO_MORE_RESULTS",
+ "id": 3
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "LookupRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ },
+ {
+ "rule": "optional",
+ "type": "ReadOptions",
+ "name": "readOptions",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "Key",
+ "name": "keys",
+ "id": 3
+ }
+ ]
+ },
+ {
+ "name": "LookupResponse",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "EntityResult",
+ "name": "found",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "EntityResult",
+ "name": "missing",
+ "id": 2
+ },
+ {
+ "rule": "repeated",
+ "type": "Key",
+ "name": "deferred",
+ "id": 3
+ }
+ ]
+ },
+ {
+ "name": "RunQueryRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ },
+ {
+ "rule": "optional",
+ "type": "PartitionId",
+ "name": "partitionId",
+ "id": 2
+ },
+ {
+ "rule": "optional",
+ "type": "ReadOptions",
+ "name": "readOptions",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "Query",
+ "name": "query",
+ "id": 3,
+ "oneof": "query_type"
+ },
+ {
+ "rule": "optional",
+ "type": "GqlQuery",
+ "name": "gqlQuery",
+ "id": 7,
+ "oneof": "query_type"
+ }
+ ],
+ "oneofs": {
+ "query_type": [
+ 3,
+ 7
+ ]
+ }
+ },
+ {
+ "name": "RunQueryResponse",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "QueryResultBatch",
+ "name": "batch",
+ "id": 1
+ },
+ {
+ "rule": "optional",
+ "type": "Query",
+ "name": "query",
+ "id": 2
+ }
+ ]
+ },
+ {
+ "name": "BeginTransactionRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ }
+ ]
+ },
+ {
+ "name": "BeginTransactionResponse",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "transaction",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "RollbackRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "transaction",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "RollbackResponse",
+ "fields": []
+ },
+ {
+ "name": "CommitRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ },
+ {
+ "rule": "optional",
+ "type": "Mode",
+ "name": "mode",
+ "id": 5
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "transaction",
+ "id": 1
+ },
+ {
+ "rule": "repeated",
+ "type": "Mutation",
+ "name": "mutations",
+ "id": 6
+ }
+ ],
+ "enums": [
+ {
+ "name": "Mode",
+ "values": [
+ {
+ "name": "MODE_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "TRANSACTIONAL",
+ "id": 1
+ },
+ {
+ "name": "NON_TRANSACTIONAL",
+ "id": 2
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "CommitResponse",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "MutationResult",
+ "name": "mutationResults",
+ "id": 3
+ },
+ {
+ "rule": "optional",
+ "type": "int32",
+ "name": "indexUpdates",
+ "id": 4
+ }
+ ]
+ },
+ {
+ "name": "AllocateIdsRequest",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "string",
+ "name": "projectId",
+ "id": 8
+ },
+ {
+ "rule": "repeated",
+ "type": "Key",
+ "name": "keys",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "AllocateIdsResponse",
+ "fields": [
+ {
+ "rule": "repeated",
+ "type": "Key",
+ "name": "keys",
+ "id": 1
+ }
+ ]
+ },
+ {
+ "name": "Mutation",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Entity",
+ "name": "insert",
+ "id": 4,
+ "oneof": "operation"
+ },
+ {
+ "rule": "optional",
+ "type": "Entity",
+ "name": "update",
+ "id": 5,
+ "oneof": "operation"
+ },
+ {
+ "rule": "optional",
+ "type": "Entity",
+ "name": "upsert",
+ "id": 6,
+ "oneof": "operation"
+ },
+ {
+ "rule": "optional",
+ "type": "Key",
+ "name": "delete",
+ "id": 7,
+ "oneof": "operation"
+ }
+ ],
+ "oneofs": {
+ "operation": [
+ 4,
+ 5,
+ 6,
+ 7
+ ]
+ }
+ },
+ {
+ "name": "MutationResult",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "Key",
+ "name": "key",
+ "id": 3
+ }
+ ]
+ },
+ {
+ "name": "ReadOptions",
+ "fields": [
+ {
+ "rule": "optional",
+ "type": "ReadConsistency",
+ "name": "readConsistency",
+ "id": 1,
+ "oneof": "consistency_type"
+ },
+ {
+ "rule": "optional",
+ "type": "bytes",
+ "name": "transaction",
+ "id": 2,
+ "oneof": "consistency_type"
+ }
+ ],
+ "oneofs": {
+ "consistency_type": [
+ 1,
+ 2
+ ]
+ },
+ "enums": [
+ {
+ "name": "ReadConsistency",
+ "values": [
+ {
+ "name": "READ_CONSISTENCY_UNSPECIFIED",
+ "id": 0
+ },
+ {
+ "name": "STRONG",
+ "id": 1
+ },
+ {
+ "name": "EVENTUAL",
+ "id": 2
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "services": [
+ {
+ "name": "Datastore",
+ "options": {},
+ "rpc": {
+ "Lookup": {
+ "request": "LookupRequest",
+ "response": "LookupResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:lookup",
+ "(google.api.http).body": "*"
+ }
+ },
+ "RunQuery": {
+ "request": "RunQueryRequest",
+ "response": "RunQueryResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:runQuery",
+ "(google.api.http).body": "*"
+ }
+ },
+ "BeginTransaction": {
+ "request": "BeginTransactionRequest",
+ "response": "BeginTransactionResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:beginTransaction",
+ "(google.api.http).body": "*"
+ }
+ },
+ "Commit": {
+ "request": "CommitRequest",
+ "response": "CommitResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:commit",
+ "(google.api.http).body": "*"
+ }
+ },
+ "Rollback": {
+ "request": "RollbackRequest",
+ "response": "RollbackResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:rollback",
+ "(google.api.http).body": "*"
+ }
+ },
+ "AllocateIds": {
+ "request": "AllocateIdsRequest",
+ "response": "AllocateIdsResponse",
+ "options": {
+ "(google.api.http).post": "/v1beta3/projects/{project_id}:allocateIds",
+ "(google.api.http).body": "*"
+ }
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}).build();
diff --git a/lib/datastore/query.js b/lib/datastore/query.js
index 3c8c38eb746a..fe64fe33c18e 100644
--- a/lib/datastore/query.js
+++ b/lib/datastore/query.js
@@ -26,8 +26,8 @@ var arrify = require('arrify');
* Build a Query object.
*
* **Queries should be built with
- * {@linkcode module:datastore/dataset#createQuery} and run via
- * {@linkcode module:datastore/dataset#runQuery}.**
+ * {@linkcode module:datastore#createQuery} and run via
+ * {@linkcode module:datastore#runQuery}.**
*
* @resource [Datastore Queries]{@link http://goo.gl/Cag0r6}
*
@@ -38,19 +38,13 @@ var arrify = require('arrify');
* @param {string} kind - Kind to query.
*
* @example
- * var dataset = gcloud.datastore.dataset({
+ * var gcloud = require('gcloud')({
+ * keyFilename: '/path/to/keyfile.json',
* projectId: 'grape-spaceship-123'
* });
*
- * // If your dataset was scoped to a namespace at initialization, your query
- * // will likewise be scoped to that namespace.
- * var query = dataset.createQuery('Lion');
- *
- * // However, you may override the namespace per query.
- * var query = dataset.createQuery('AnimalNamespace', 'Lion');
- *
- * // You may also remove the namespace altogether.
- * var query = dataset.createQuery(null, 'Lion');
+ * var datastore = gcloud.datastore();
+ * var query = datastore.createQuery('AnimalNamespace', 'Lion');
*/
function Query(namespace, kinds) {
if (!kinds) {
@@ -103,7 +97,8 @@ Query.prototype.autoPaginate = function(autoPaginateVal) {
*
* // To filter by key, use `__key__` for the property name. Filter on keys
* // stored as properties is not currently supported.
- * var keyQuery = query.filter('__key__ =', dataset.key(['Company', 'Google']));
+ * var key = datastore.key(['Company', 'Google']);
+ * var keyQuery = query.filter('__key__ =', key);
*/
Query.prototype.filter = function(filter, value) {
// TODO: Add filter validation.
@@ -128,7 +123,7 @@ Query.prototype.filter = function(filter, value) {
* @return {module:datastore/query}
*
* @example
- * var ancestoryQuery = query.hasAncestor(dataset.key(['Parent', 123]));
+ * var ancestryQuery = query.hasAncestor(datastore.key(['Parent', 123]));
*/
Query.prototype.hasAncestor = function(key) {
this.filters.push({ name: '__key__', op: 'HAS_ANCESTOR', val: key });
diff --git a/lib/datastore/request.js b/lib/datastore/request.js
index eb73093e0f87..d041dd27c047 100644
--- a/lib/datastore/request.js
+++ b/lib/datastore/request.js
@@ -22,9 +22,11 @@
var arrify = require('arrify');
var concat = require('concat-stream');
+var extend = require('extend');
var format = require('string-format-obj');
var is = require('is');
var propAssign = require('prop-assign');
+var proto = require('./proto.js').google.datastore.v1beta3;
var request = require('request').defaults({
pool: {
maxSockets: Infinity
@@ -32,7 +34,6 @@ var request = require('request').defaults({
});
var split = require('split-array-stream');
var through = require('through2');
-var extend = require('extend');
/**
* @type {module:datastore/entity}
@@ -40,12 +41,6 @@ var extend = require('extend');
*/
var entity = require('./entity.js');
-/**
- * @type {module:datastore/pb}
- * @private
- */
-var pb = require('./pb.js');
-
/**
* @type {module:datastore/query}
* @private
@@ -80,19 +75,19 @@ var MODE_TRANSACTIONAL = 'TRANSACTIONAL';
*
* Handles request logic for Datastore.
*
- * Creates requests to the Dataset endpoint. Designed to be inherited by
- * datastore.Dataset and datastore.Transaction objects.
+ * Creates requests to the Datastore endpoint. Designed to be inherited by
+ * {module:datastore} and {module:datastore/transaction} objects.
*
* @example
* // This is how to create a transaction object directly using this Transaction
* // class. The following transaction object is created for use in the examples
* // in this file below.
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
+ * var datastore = gcloud.datastore({ projectId: 'project-id' });
* var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
+ * var transaction = new Transaction(datastore, 'my-project-id');
* transaction.id = '1234'; // Give the transaction an ID.
*/
-/*
+/**
* Handle logic for Datastore API operations.
*
* @constructor
@@ -106,6 +101,7 @@ function DatastoreRequest() {}
* transaction. Get operations require a valid key to retrieve the
* key-identified entity from Datastore.
*
+ * @todo update resource link below.
* @resource [Datasets: lookup API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/lookup}
*
* @throws {Error} If at least one Key object is not provided.
@@ -119,13 +115,13 @@ function DatastoreRequest() {}
* @example
* //-
* // Where you see `transaction`, assume this is the context that's relevant to
- * // your use, whether that be a Dataset or Transaction object.
+ * // your use, whether that be a Datastore or Transaction object.
* //-
*
* //-
* // Get a single entity.
* //-
- * var key = dataset.key(['Company', 123]);
+ * var key = datastore.key(['Company', 123]);
*
* transaction.get(key, function(err, entity) {});
*
@@ -133,8 +129,8 @@ function DatastoreRequest() {}
* // Get multiple entities at once with a callback.
* //-
* var keys = [
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
* ];
*
* transaction.get(keys, function(err, entities) {});
@@ -195,13 +191,13 @@ DatastoreRequest.prototype.get = function(keys, callback) {
});
}
- this.makeReq_('lookup', { key: keys }, onApiResponse);
+ this.makeReq_('lookup', { keys: keys }, onApiResponse);
return stream;
};
/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `insert`.
+ * Maps to {module:datastore#save}, forcing the method to be `insert`.
*/
DatastoreRequest.prototype.insert = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'insert'));
@@ -223,6 +219,7 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* included in *all* indexes, you must supply an entity's `data` property as an
* array. See below for an example.
*
+ * @todo update resource link below.
* @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit}
*
* @borrows {module:datastore/transaction#save} as save
@@ -232,7 +229,7 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* @param {object|object[]} entities - Datastore key object(s).
* @param {Key} entities.key - Datastore key object.
* @param {string=} entities.method - Optional method to explicity use for save.
- * The choices include 'insert', 'update', 'upsert' and 'insert_auto_id'.
+ * The choices include 'insert', 'update', and 'upsert'.
* @param {object|object[]} entities.data - Data to save with the provided key.
* If you provide an array of objects, you must use the explicit syntax:
* `name` for the name of the property and `value` for its value. You may
@@ -249,9 +246,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
- * var key = dataset.key('Company');
+ * var key = datastore.key('Company');
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* rating: '10'
@@ -268,9 +265,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // original Key object used to save will be updated to contain the path with
* // the name instead of a generated ID.
* //-
- * var key = dataset.key(['Company', 'donutshack']);
+ * var key = datastore.key(['Company', 'donutshack']);
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* name: 'DonutShack',
@@ -288,12 +285,12 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* //
* // Here we are providing a key with namespace.
* //-
- * var key = dataset.key({
+ * var key = datastore.key({
* namespace: 'my-namespace',
* path: ['Company', 'donutshack']
* });
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* name: 'DonutShack',
@@ -312,9 +309,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
- * var key = dataset.key('Company');
+ * var key = datastore.key('Company');
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* name: 'DonutShack', // strings
@@ -332,8 +329,8 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // To specify an `excludeFromIndexes` value for a Datastore entity, pass in
* // an array for the key's data. The above example would then look like:
* //-
- * dataset.save({
- * key: dataset.key('Company'),
+ * datastore.save({
+ * key: datastore.key('Company'),
* data: [
* {
* name: 'rating',
@@ -346,10 +343,10 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* //-
* // Save multiple entities at once.
* //-
- * var companyKey = dataset.key(['Company', 123]);
- * var productKey = dataset.key(['Product', 'Computer']);
+ * var companyKey = datastore.key(['Company', 123]);
+ * var productKey = datastore.key(['Product', 'Computer']);
*
- * dataset.save([
+ * datastore.save([
* {
* key: companyKey,
* data: {
@@ -367,9 +364,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* //-
* // Explicitly attempt to 'insert' a specific entity.
* //-
- * var userKey = dataset.key(['User', 'chilts']);
+ * var userKey = datastore.key(['User', 'chilts']);
*
- * dataset.save([
+ * datastore.save([
* {
* key: userKey,
* method: 'insert', // force the method to 'insert'
@@ -382,13 +379,12 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
DatastoreRequest.prototype.save = function(entities, callback) {
entities = arrify(entities);
- var insertIndexes = [];
-
- var mutation = {
- insert: [],
- update: [],
- upsert: [],
- insert_auto_id: []
+ var insertIndexes = {};
+ var mutations = [];
+ var methods = {
+ insert: true,
+ update: true,
+ upsert: true
};
// Iterate over the entity objects, build a proto from all keys and values,
@@ -396,24 +392,33 @@ DatastoreRequest.prototype.save = function(entities, callback) {
entities.forEach(function(entityObject, index) {
entityObject = extend(true, {}, entityObject);
+ var mutation = {};
var entityProto = {};
- var method = entityObject.method;
+ var method = 'upsert';
+
+ if (entityObject.method) {
+ if (methods[entityObject.method]) {
+ method = entityObject.method;
+ } else {
+ throw new Error('Method ' + entityObject.method + ' not recognized.');
+ }
+ }
+
+ if (!entity.isKeyComplete(entityObject.key)) {
+ insertIndexes[index] = true;
+ }
if (is.array(entityObject.data)) {
- entityProto.property = entityObject.data.map(function(data) {
- data.value = entity.valueToProperty(data.value);
+ entityProto.properties = entityObject.data.map(function(data) {
+ data.value = entity.encodeValue(data.value);
if (is.boolean(data.excludeFromIndexes)) {
- var indexed = !data.excludeFromIndexes;
+ var excluded = data.excludeFromIndexes;
+ var values = data.value.arrayValue;
- if (is.array(data.value.list_value)) {
- data.value.list_value =
- data.value.list_value.map(propAssign('indexed', indexed));
- } else {
- data.value.indexed = indexed;
+ if (is.array(values)) {
+ values = values.map(propAssign('excludeFromIndexes', excluded));
}
-
- delete data.excludeFromIndexes;
}
return data;
@@ -424,28 +429,12 @@ DatastoreRequest.prototype.save = function(entities, callback) {
entityProto.key = entity.keyToKeyProto(entityObject.key);
- if (method) {
- if (mutation[method]) {
- mutation[method].push(entityProto);
-
- if (method === 'insert_auto_id') {
- insertIndexes.push(index);
- }
- } else {
- throw new Error('Method ' + method + ' not recognized.');
- }
- } else {
- if (entity.isKeyComplete(entityObject.key)) {
- mutation.upsert.push(entityProto);
- } else {
- insertIndexes.push(index);
- mutation.insert_auto_id.push(entityProto);
- }
- }
+ mutation[method] = entityProto;
+ mutations.push(mutation);
});
var req = {
- mutation: mutation
+ mutations: mutations
};
if (this.id) {
@@ -462,10 +451,15 @@ DatastoreRequest.prototype.save = function(entities, callback) {
return;
}
- var autoInserted = (resp.mutation_result.insert_auto_id_key || []);
- autoInserted.forEach(function(key, index) {
- var path = entity.keyFromKeyProto(key).path;
- entities[insertIndexes[index]].key.path = path;
+ arrify(resp.mutationResults).forEach(function(result, index) {
+ if (!result.key) {
+ return;
+ }
+
+ if (insertIndexes[index]) {
+ var path = entity.keyFromKeyProto(result.key).path;
+ entities[index].key.path = path;
+ }
});
callback(null, resp);
@@ -475,6 +469,7 @@ DatastoreRequest.prototype.save = function(entities, callback) {
/**
* Delete all entities identified with the specified key(s).
*
+ * @todo update resource doc below.
* @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit#mutation.delete}
*
* @param {Key|Key[]} key - Datastore key object(s).
@@ -485,25 +480,28 @@ DatastoreRequest.prototype.save = function(entities, callback) {
* @example
* //-
* // Where you see `transaction`, assume this is the context that's relevant to
- * // your use case, whether that be a Dataset or a Transaction object.
+ * // your use case, whether that be a Datastore or a Transaction object.
* //-
*
* // Delete a single entity.
- * transaction.delete(dataset.key(['Company', 123]), function(err, apiResp) {});
+ * var key = datastore.key(['Company', 123]);
+ * transaction.delete(key, function(err, apiResp) {});
*
* // Delete multiple entities at once.
* transaction.delete([
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
* ], function(err, apiResponse) {});
*/
DatastoreRequest.prototype.delete = function(keys, callback) {
callback = callback || util.noop;
var req = {
- mutation: {
- delete: arrify(keys).map(entity.keyToKeyProto)
- }
+ mutations: arrify(keys).map(function(key) {
+ return {
+ delete: entity.keyToKeyProto(key)
+ };
+ })
};
if (this.id) {
@@ -529,6 +527,7 @@ DatastoreRequest.prototype.delete = function(keys, callback) {
*
* See below for examples of both approaches.
*
+ * @todo update resource link below.
* @resource [Datasets: runQuery API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/runQuery}
*
* @param {module:datastore/query} q - Query object.
@@ -551,9 +550,9 @@ DatastoreRequest.prototype.delete = function(keys, callback) {
* @example
* //-
* // Where you see `transaction`, assume this is the context that's relevant to
- * // your use, whether that be a Dataset or a Transaction object.
+ * // your use, whether that be a Datastore or a Transaction object.
* //-
- * var query = dataset.createQuery('Lion');
+ * var query = datastore.createQuery('Lion');
*
* transaction.runQuery(query, function(err, entities) {
* if (!err) {
@@ -565,7 +564,7 @@ DatastoreRequest.prototype.delete = function(keys, callback) {
* // To control how many API requests are made and page through the results
* // manually, call `autoPaginate(false)` on your query.
* //-
- * var manualPageQuery = dataset.createQuery('Lion').autoPaginate(false);
+ * var manualPageQuery = datastore.createQuery('Lion').autoPaginate(false);
*
* var callback = function(err, entities, nextQuery, apiResponse) {
* if (nextQuery) {
@@ -592,7 +591,7 @@ DatastoreRequest.prototype.delete = function(keys, callback) {
* // A keys-only query returns just the keys of the result entities instead of
* // the entities themselves, at lower latency and cost.
* //-
- * var keysOnlyQuery = dataset.createQuery('Lion').select('__key__');
+ * var keysOnlyQuery = datastore.createQuery('Lion').select('__key__');
*
* transaction.runQuery(keysOnlyQuery, function(err, entities) {
* // entities[].key = Key object
@@ -601,12 +600,12 @@ DatastoreRequest.prototype.delete = function(keys, callback) {
*/
DatastoreRequest.prototype.runQuery = function(query, callback) {
var req = {
- read_options: {},
+ readOptions: {},
query: entity.queryToQueryProto(query)
};
if (query.namespace) {
- req.partition_id = {
+ req.partitionId = {
namespace: query.namespace
};
}
@@ -617,14 +616,22 @@ DatastoreRequest.prototype.runQuery = function(query, callback) {
return;
}
- var entities = entity.formatArray(resp.batch.entity_result);
+ var entities = [];
var nextQuery = null;
- if (resp.batch.end_cursor && entities.length > 0) {
- var endCursor = resp.batch.end_cursor.toBase64();
+ if (resp.batch.entityResults) {
+ entities = entity.formatArray(resp.batch.entityResults);
+ }
+
+ var NOT_FINISHED_CODE = 2;
+
+ if (resp.batch.moreResults === NOT_FINISHED_CODE) {
+ var endCursor = resp.batch.endCursor.toBase64();
+ var offset = query.offsetVal === -1 ? 0 : query.offsetVal;
+ var nextOffset = offset - resp.batch.skippedResults;
nextQuery = extend(true, new Query(), query);
- nextQuery.start(endCursor).offset(0);
+ nextQuery.start(endCursor).offset(nextOffset);
}
callback(null, entities, nextQuery, resp);
@@ -634,6 +641,7 @@ DatastoreRequest.prototype.runQuery = function(query, callback) {
/**
* Generate IDs without creating entities.
*
+ * @todo update resource link below.
* @resource [Datasets: allocateIds API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/allocateIds}
*
* @param {Key} incompleteKey - The key object to complete.
@@ -646,24 +654,29 @@ DatastoreRequest.prototype.runQuery = function(query, callback) {
* @example
* //-
* // Where you see `transaction`, assume this is the context that's relevant to
- * // your use, whether that be a Dataset or a Transaction object.
+ * // your use, whether that be a Datastore or a Transaction object.
* //-
+ * var incompleteKey = datastore.key(['Company']);
*
- * var incompleteKey = dataset.key(['Company']);
- *
+ * //-
* // The following call will create 100 new IDs from the Company kind, which
* // exists under the default namespace.
+ * //-
* transaction.allocateIds(incompleteKey, 100, function(err, keys) {});
*
+ * //-
* // You may prefer to create IDs from a non-default namespace by providing an
* // incomplete key with a namespace. Similar to the previous example, the call
* // below will create 100 new IDs, but from the Company kind that exists under
* // the "ns-test" namespace.
- * var incompleteKey = dataset.key({
+ * //-
+ * var incompleteKey = datastore.key({
* namespace: 'ns-test',
* path: ['Company']
* });
- * var callback = function(err, keys, apiResponse) {};
+ *
+ * function callback(err, keys, apiResponse) {}
+ *
* transaction.allocateIds(incompleteKey, 100, callback);
*/
DatastoreRequest.prototype.allocateIds = function(incompleteKey, n, callback) {
@@ -677,7 +690,7 @@ DatastoreRequest.prototype.allocateIds = function(incompleteKey, n, callback) {
}
var req = {
- key: incompleteKeys
+ keys: incompleteKeys
};
this.makeReq_('allocateIds', req, function(err, resp) {
@@ -686,14 +699,14 @@ DatastoreRequest.prototype.allocateIds = function(incompleteKey, n, callback) {
return;
}
- var keys = (resp.key || []).map(entity.keyFromKeyProto);
+ var keys = (resp.keys || []).map(entity.keyFromKeyProto);
callback(null, keys, resp);
});
};
/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `update`.
+ * Maps to {module:datastore#save}, forcing the method to be `update`.
*/
DatastoreRequest.prototype.update = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'update'));
@@ -701,7 +714,7 @@ DatastoreRequest.prototype.update = function(entities, callback) {
};
/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `upsert`.
+ * Maps to {module:datastore#save}, forcing the method to be `upsert`.
*/
DatastoreRequest.prototype.upsert = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'upsert'));
@@ -717,22 +730,8 @@ DatastoreRequest.prototype.upsert = function(entities, callback) {
* @param {function} callback - The callback function.
*
* @private
- *
- * @example
- * var deleteRequest = {
- * mutation: {
- * delete: [] // datastore key objects.
- * }
- * };
- *
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
- * var callback = function(err, result, apiResponse) {};
- * var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
- * transaction.makeReq_('commit', deleteRequest, callback);
*/
DatastoreRequest.prototype.makeReq_ = function(method, body, callback) {
- // TODO: Handle non-HTTP 200 cases.
if (!callback) {
callback = body;
body = {};
@@ -754,23 +753,24 @@ DatastoreRequest.prototype.makeReq_ = function(method, body, callback) {
body.transaction = this.id;
}
- if (method === 'lookup' && this.id) {
- body.read_options = body.read_options || {};
- body.read_options.transaction = this.id;
+ if (this.id && (method === 'lookup' || method === 'runQuery')) {
+ body.readOptions = body.readOptions || {};
+ body.readOptions.transaction = this.id;
}
var pbKey = method[0].toUpperCase() + method.substr(1);
- var pbRequest = new pb[pbKey + 'Request'](body).toBuffer();
- var pbResponse = pb[pbKey + 'Response'];
+ var pbRequest = proto[pbKey + 'Request'].encode(body).toBuffer();
+ var pbResponse = proto[pbKey + 'Response'];
var reqOpts = {
method: 'POST',
- uri: format('{apiEndpoint}/{path}/{projectId}/{method}', {
+ uri: format('{apiEndpoint}/v1beta3/projects/{projectId}:{method}', {
apiEndpoint: this.apiEndpoint,
- path: 'datastore/v1beta2/datasets',
projectId: this.projectId,
method: method
}),
+ body: is.empty(body) ? '' : pbRequest,
+ encoding: null,
headers: {
'Content-Type': 'application/x-protobuf'
}
@@ -779,34 +779,25 @@ DatastoreRequest.prototype.makeReq_ = function(method, body, callback) {
this.makeAuthenticatedRequest_(reqOpts, {
onAuthenticated: function(err, authenticatedReqOpts) {
if (err) {
- callback(err, null); // TODO(ryanseys): What goes as third parameter?
+ callback(err, null);
return;
}
- authenticatedReqOpts.headers = authenticatedReqOpts.headers || {};
- authenticatedReqOpts.headers['Content-Length'] = pbRequest.length;
-
- var apiRequest = request(authenticatedReqOpts);
+ request(authenticatedReqOpts, function(err, resp, body) {
+ if (err) {
+ callback(err);
+ return;
+ }
- apiRequest.on('error', callback);
+ util.handleResp(null, resp, body, function(err, result) {
+ if (err) {
+ callback(err, null, result);
+ return;
+ }
- apiRequest.on('response', function(resp) {
- var buffer = new Buffer('');
- resp.on('data', function(chunk) {
- buffer = Buffer.concat([buffer, chunk]);
- });
- resp.on('end', function() {
- util.handleResp(null, resp, buffer.toString(), function(err, result) {
- if (err) {
- callback(err, null, result);
- return;
- }
- callback(null, pbResponse.decode(buffer), result);
- });
+ callback(null, pbResponse.decode(body), result);
});
});
-
- apiRequest.end(pbRequest);
}
});
};
diff --git a/lib/datastore/transaction.js b/lib/datastore/transaction.js
index cdf73daf6faf..ef78ead63b64 100644
--- a/lib/datastore/transaction.js
+++ b/lib/datastore/transaction.js
@@ -38,25 +38,19 @@ var DatastoreRequest = require('./request.js');
var extend = require('extend');
/*! Developer Documentation
- *
- * @param {module:common/connection#Connection} connection - An authenticated
- * connection to Google Cloud Datastore.
- * @param {string} projectId - Dataset ID. This is your project ID from the
- * Google Developers Console.
- *
* @example
* // This is how to create a transaction object directly using this Transaction
* // class. The following transaction object is created for use in the examples
* // in this file below.
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
+ * var datastore = gcloud.datastore({ projectId: 'project-id' });
* var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
+ * var transaction = new Transaction(datastore, 'my-project-id');
* transaction.id = '1234'; // Give the transaction an ID.
*/
/**
* Build a Transaction object. Transactions will be created for you by
- * {@linkcode module:datastore/dataset}. When you need to run a transactional
- * operation, use {@linkcode module:datastore/dataset#runInTransaction}.
+ * {module:datastore}. When you need to run a transactional
+ * operation, use {module:datastore#runInTransaction}.
*
* @resource [Transactions Reference]{@link https://cloud.google.com/datastore/docs/concepts/transactions}
*
@@ -65,14 +59,14 @@ var extend = require('extend');
* @mixes module:datastore/request
*
* @example
- * dataset.runInTransaction(function(transaction, done) {
+ * datastore.runInTransaction(function(transaction, done) {
* // `transaction` is a Transaction object.
* }, function(err) {});
*/
-function Transaction(dataset, projectId) {
+function Transaction(datastore, projectId) {
this.id = null;
- this.apiEndpoint = dataset.apiEndpoint;
- this.makeAuthenticatedRequest_ = dataset.makeAuthenticatedRequest_;
+ this.apiEndpoint = datastore.apiEndpoint;
+ this.makeAuthenticatedRequest_ = datastore.makeAuthenticatedRequest_;
this.projectId = projectId;
// A queue for entity modifications made during the transaction.
@@ -121,6 +115,7 @@ Transaction.prototype.begin_ = function(callback) {
/**
* Reverse a transaction remotely and finalize the current transaction instance.
*
+ * @todo update resource link below.
* @resource [Datasets: rollback API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/rollback}
*
* @param {function} callback - The callback function.
@@ -149,7 +144,7 @@ Transaction.prototype.rollback = function(callback) {
/**
* Commit the remote transaction and finalize the current transaction instance.
* This function is provided as the `done` function in the callback of
- * `dataset.runInTransaction(function(transaction, done) {});`
+ * `datastore.runInTransaction(function(transaction, done) {});`
*
* @param {function} callback - The callback function.
*
@@ -265,18 +260,19 @@ Transaction.prototype.commit_ = function(callback) {
* Delete all entities identified with the specified key(s) in the current
* transaction.
*
+ * @todo update the resource link below.
* @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit#mutation.delete}
*
* @param {Key|Key[]} key - Datastore key object(s).
*
* @example
* // Delete a single entity.
- * transaction.delete(dataset.key(['Company', 123]));
+ * transaction.delete(datastore.key(['Company', 123]));
*
* // Delete multiple entities at once.
* transaction.delete([
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
* ]);
*/
Transaction.prototype.delete = function(entities) {
@@ -308,6 +304,7 @@ Transaction.prototype.delete = function(entities) {
* included in *all* indexes, you must supply an entity's `data` property as an
* array. See below for an example.
*
+ * @todo update resource link below.
* @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit}
*
* @param {object|object[]} entities - Datastore key object(s).
@@ -325,7 +322,7 @@ Transaction.prototype.delete = function(entities) {
* // committed, the Key object held by the `key` variable will be populated
* // with a path containing its generated ID.
* //-
- * var key = dataset.key('Company');
+ * var key = datastore.key('Company');
*
* transaction.save({
* key: key,
@@ -352,8 +349,8 @@ Transaction.prototype.delete = function(entities) {
* //-
* // Save multiple entities at once.
* //-
- * var companyKey = dataset.key(['Company', 123]);
- * var productKey = dataset.key(['Product', 'Computer']);
+ * var companyKey = datastore.key(['Company', 123]);
+ * var productKey = datastore.key(['Product', 'Computer']);
* transaction.save([
* {
* key: companyKey,
diff --git a/lib/index.js b/lib/index.js
index 71644c2918b0..d96e72a895a6 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -200,18 +200,6 @@ var apis = {
storage: require('./storage')
};
-/**
- * Scoped APIs are "boxed in" APIs. The "outer" class (e.g. Datastore) is a
- * container for sub-classes that can be given separate authentication and
- * instantiation options.
- *
- * @type {object}
- * @private
- */
-var scopedApis = {
- datastore: true
-};
-
/*! Developer Documentation
*
* Previously we used gcloud to expose an object filled with factory patterns,
@@ -290,11 +278,7 @@ function gcloud(config) {
return Object.keys(apis).reduce(function(gcloudExposedApi, apiName) {
var Class = apis[apiName];
- if (scopedApis[apiName]) {
- gcloudExposedApi[apiName] = new Class(config);
- } else {
- gcloudExposedApi[apiName] = Class;
- }
+ gcloudExposedApi[apiName] = Class;
return gcloudExposedApi;
}, { config_: config });
diff --git a/package.json b/package.json
index 252f52ffb5a1..a9228cf31b46 100644
--- a/package.json
+++ b/package.json
@@ -32,6 +32,7 @@
"main": "./lib/index",
"files": [
"lib/",
+ "proto/",
"AUTHORS",
"CONTRIBUTORS",
"COPYING"
@@ -99,7 +100,7 @@
"once": "^1.3.1",
"prop-assign": "^1.0.0",
"propprop": "^0.3.0",
- "protobufjs": "^4.0.0",
+ "protobufjs": "^4.1.1",
"pumpify": "^1.3.3",
"request": "^2.53.0",
"retry-request": "^1.2.1",
diff --git a/scripts/docs.sh b/scripts/docs.sh
index b88506605d37..292d7238272e 100755
--- a/scripts/docs.sh
+++ b/scripts/docs.sh
@@ -37,7 +37,6 @@
./node_modules/.bin/dox < lib/dns/record.js > docs/json/master/dns/record.json &
./node_modules/.bin/dox < lib/dns/zone.js > docs/json/master/dns/zone.json &
-./node_modules/.bin/dox < lib/datastore/dataset.js > docs/json/master/datastore/dataset.json &
./node_modules/.bin/dox < lib/datastore/index.js > docs/json/master/datastore/index.json &
./node_modules/.bin/dox < lib/datastore/query.js > docs/json/master/datastore/query.json &
./node_modules/.bin/dox < lib/datastore/request.js > docs/json/master/datastore/request.json &
diff --git a/system-test/datastore.js b/system-test/datastore.js
index d7e7fd3e649b..a3e6a5747804 100644
--- a/system-test/datastore.js
+++ b/system-test/datastore.js
@@ -16,20 +16,21 @@
'use strict';
-var env = require('./env.js');
-
var assert = require('assert');
var async = require('async');
-var datastore = require('../lib/datastore');
-var ds = datastore.dataset(env);
+
+var env = require('./env.js');
+var gcloud = require('../lib/index.js')(env);
var entity = require('../lib/datastore/entity.js');
-describe('datastore', function() {
+describe('Datastore', function() {
+ var datastore = gcloud.datastore();
+
it('should allocate IDs', function(done) {
- ds.allocateIds(ds.key('Kind'), 10, function(err, keys) {
+ datastore.allocateIds(datastore.key('Kind'), 10, function(err, keys) {
assert.ifError(err);
- assert.equal(keys.length, 10);
- assert.equal(entity.isKeyComplete(keys[0]), true);
+ assert.strictEqual(keys.length, 10);
+ assert.strictEqual(entity.isKeyComplete(keys[0]), true);
done();
});
});
@@ -46,105 +47,114 @@ describe('datastore', function() {
};
it('should save/get/delete with a key name', function(done) {
- var postKey = ds.key(['Post', 'post1']);
- ds.save({ key: postKey, data: post }, function(err) {
+ var postKey = datastore.key(['Post', 'post1']);
+
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should save/get/delete with a numeric key id', function(done) {
- var postKey = ds.key(['Post', 123456789]);
+ var postKey = datastore.key(['Post', 123456789]);
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should save/get/delete a buffer', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key(['Post']);
var data = {
buf: new Buffer('010100000000000000000059400000000000006940', 'hex')
};
- ds.save({ key: postKey, data: data }, function(err) {
+ datastore.save({ key: postKey, data: data }, function(err) {
assert.ifError(err);
var assignedId = postKey.path[1];
assert(assignedId);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, data);
- ds.delete(ds.key(['Post', assignedId]), done);
+ datastore.delete(datastore.key(['Post', assignedId]), done);
});
});
});
it('should save/get/delete with a generated key id', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
// The key's path should now be complete.
assert(postKey.path[1]);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should fail explicitly set second insert on save', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
// The key's path should now be complete.
assert(postKey.path[1]);
- ds.save({ key: postKey, method: 'insert', data: post }, function(err) {
- assert.notEqual(err, null); // should fail insert
+ datastore.save({
+ key: postKey,
+ method: 'insert',
+ data: post
+ }, function(err) {
+ assert.notStrictEqual(err, null); // should fail insert
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
});
it('should fail explicitly set first update on save', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, method: 'update', data: post }, function(err) {
- assert.notEqual(err, null);
+ datastore.save({
+ key: postKey,
+ method: 'update',
+ data: post
+ }, function(err) {
+ assert.notStrictEqual(err, null);
done();
});
});
@@ -159,44 +169,41 @@ describe('datastore', function() {
wordCount: 450,
rating: 4.5,
};
- var key1 = ds.key('Post');
- var key2 = ds.key('Post');
- ds.save([
+ var key1 = datastore.key('Post');
+ var key2 = datastore.key('Post');
+
+ datastore.save([
{ key: key1, data: post },
{ key: key2, data: post2 }
], function(err) {
assert.ifError(err);
- var firstKey = ds.key(['Post', key1.path[1]]);
- var secondKey = ds.key(['Post', key2.path[1]]);
-
- ds.get([firstKey, secondKey], function(err, entities) {
+ datastore.get([key1, key2], function(err, entities) {
assert.ifError(err);
+ assert.strictEqual(entities.length, 2);
- assert.equal(entities.length, 2);
-
- ds.delete([firstKey, secondKey], done);
+ datastore.delete([key1, key2], done);
});
});
});
it('should get multiple entities in a stream', function(done) {
- var key1 = ds.key('Post');
- var key2 = ds.key('Post');
+ var key1 = datastore.key('Post');
+ var key2 = datastore.key('Post');
- ds.save([
+ datastore.save([
{ key: key1, data: post },
{ key: key2, data: post }
], function(err) {
assert.ifError(err);
- var firstKey = ds.key(['Post', key1.path[1]]);
- var secondKey = ds.key(['Post', key2.path[1]]);
+ var firstKey = datastore.key(['Post', key1.path[1]]);
+ var secondKey = datastore.key(['Post', key2.path[1]]);
var numEntitiesEmitted = 0;
- ds.get([firstKey, secondKey])
+ datastore.get([firstKey, secondKey])
.on('error', done)
.on('data', function() {
numEntitiesEmitted++;
@@ -204,123 +211,136 @@ describe('datastore', function() {
.on('end', function() {
assert.strictEqual(numEntitiesEmitted, 2);
- ds.delete([firstKey, secondKey], done);
+ datastore.delete([firstKey, secondKey], done);
});
});
});
- });
- it('should save keys as a part of entity and query by key', function(done) {
- var personKey = ds.key(['Person', 'name']);
- ds.save({
- key: personKey,
- data: {
- fullName: 'Full name',
- linkedTo: personKey // himself
- }
- }, function(err) {
- assert.ifError(err);
- var q = ds.createQuery('Person')
- .filter('linkedTo =', personKey);
- ds.runQuery(q, function(err, results) {
+ it('should save keys as a part of entity and query by key', function(done) {
+ var personKey = datastore.key(['Person', 'name']);
+
+ datastore.save({
+ key: personKey,
+ data: {
+ fullName: 'Full name',
+ linkedTo: personKey // himself
+ }
+ }, function(err) {
assert.ifError(err);
- assert.strictEqual(results[0].data.fullName, 'Full name');
- assert.deepEqual(results[0].data.linkedTo, personKey);
- ds.delete(personKey, done);
+
+ var query = datastore.createQuery('Person')
+ .filter('linkedTo =', personKey);
+
+ datastore.runQuery(query, function(err, results) {
+ assert.ifError(err);
+
+ assert.strictEqual(results[0].data.fullName, 'Full name');
+ assert.deepEqual(results[0].data.linkedTo, personKey);
+
+ datastore.delete(personKey, done);
+ });
});
});
});
describe('querying the datastore', function() {
- var ancestor = ds.key(['Book', 'GoT']);
+ var ancestor = datastore.key(['Book', 'GoT']);
var keys = [
- ds.key(['Book', 'GoT', 'Character', 'Rickard']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard']),
- ds.key(['Book', 'GoT', 'Character', 'Catelyn']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Arya']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Sansa']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Robb']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Bran']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Jon Snow'])
- ];
+ // Paths:
+ ['Rickard'],
+ ['Rickard', 'Character', 'Eddard'],
+ ['Catelyn'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Arya'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Sansa'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Robb'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Bran'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Jon Snow']
+ ].map(function(path) {
+ return datastore.key(['Book', 'GoT', 'Character'].concat(path));
+ });
- var characters = [{
- name: 'Rickard',
- family: 'Stark',
- appearances: 0,
- alive: false
- }, {
- name: 'Eddard',
- family: 'Stark',
- appearances: 9,
- alive: false
- }, {
- name: 'Catelyn',
- family: ['Stark', 'Tully'],
- appearances: 26,
- alive: false
- }, {
- name: 'Arya',
- family: 'Stark',
- appearances: 33,
- alive: true
- }, {
- name: 'Sansa',
- family: 'Stark',
- appearances: 31,
- alive: true
- }, {
- name: 'Robb',
- family: 'Stark',
- appearances: 22,
- alive: false
- }, {
- name: 'Bran',
- family: 'Stark',
- appearances: 25,
- alive: true
- }, {
- name: 'Jon Snow',
- family: 'Stark',
- appearances: 32,
- alive: true
- }];
+ var characters = [
+ {
+ name: 'Rickard',
+ family: 'Stark',
+ appearances: 9,
+ alive: false
+ },
+ {
+ name: 'Eddard',
+ family: 'Stark',
+ appearances: 9,
+ alive: false
+ },
+ {
+ name: 'Catelyn',
+ family: ['Stark', 'Tully'],
+ appearances: 26,
+ alive: false
+ },
+ {
+ name: 'Arya',
+ family: 'Stark',
+ appearances: 33,
+ alive: true
+ },
+ {
+ name: 'Sansa',
+ family: 'Stark',
+ appearances: 31,
+ alive: true
+ },
+ {
+ name: 'Robb',
+ family: 'Stark',
+ appearances: 22,
+ alive: false
+ },
+ {
+ name: 'Bran',
+ family: 'Stark',
+ appearances: 25,
+ alive: true
+ },
+ {
+ name: 'Jon Snow',
+ family: 'Stark',
+ appearances: 32,
+ alive: true
+ }
+ ];
before(function(done) {
- ds.save(keys.map(function(key, index) {
+ var keysToSave = keys.map(function(key, index) {
return {
key: key,
data: characters[index]
};
- }), function(err) {
- assert.ifError(err);
- done();
});
+
+ datastore.save(keysToSave, done);
+ });
+
+ after(function(done) {
+ datastore.delete(keys, done);
});
it('should limit queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor).limit(5)
- .autoPaginate(false);
+ var firstQ = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .limit(5)
+ .autoPaginate(false);
- ds.runQuery(q, function(err, firstEntities, secondQuery) {
+ datastore.runQuery(firstQ, function(err, firstEntities, secondQ) {
assert.ifError(err);
- assert.equal(firstEntities.length, 5);
+ assert.strictEqual(firstEntities.length, 5);
- ds.runQuery(secondQuery, function(err, secondEntities, thirdQuery) {
+ datastore.runQuery(secondQ, function(err, secondEntities, thirdQ) {
assert.ifError(err);
- assert.equal(secondEntities.length, 3);
-
- ds.runQuery(thirdQuery, function(err, thirdEntities) {
- assert.ifError(err);
- assert.equal(thirdEntities.length, 0);
- done();
- });
+ assert.strictEqual(secondEntities.length, 3);
+ assert.strictEqual(thirdQ, null);
+ done();
});
});
});
@@ -328,175 +348,191 @@ describe('datastore', function() {
it('should not go over a limit', function(done) {
var limit = 3;
- var q = ds.createQuery('Character')
- .hasAncestor(ancestor)
- .limit(limit);
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor).limit(limit);
- ds.runQuery(q, function(err, results) {
+ datastore.runQuery(q, function(err, results) {
assert.ifError(err);
- assert.equal(results.length, limit);
+ assert.strictEqual(results.length, limit);
done();
});
});
it('should run a query as a stream', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor);
+ var q = datastore.createQuery('Character').hasAncestor(ancestor);
var resultsReturned = 0;
- ds.runQuery(q)
+ datastore.runQuery(q)
.on('error', done)
.on('data', function() { resultsReturned++; })
.on('end', function() {
- assert.equal(resultsReturned, characters.length);
+ assert.strictEqual(resultsReturned, characters.length);
done();
});
});
it('should not go over a limit with a stream', function(done) {
var limit = 3;
- var q = ds.createQuery('Character').hasAncestor(ancestor).limit(limit);
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .limit(limit);
var resultsReturned = 0;
- ds.runQuery(q)
+ datastore.runQuery(q)
.on('error', done)
.on('data', function() { resultsReturned++; })
.on('end', function() {
- assert.equal(resultsReturned, limit);
+ assert.strictEqual(resultsReturned, limit);
done();
});
});
it('should filter queries with simple indexes', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('appearances >=', 20);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('appearances >=', 20);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 6);
+ assert.strictEqual(entities.length, 6);
done();
});
});
it('should filter queries with defined indexes', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('family =', 'Stark')
- .filter('appearances >=', 20);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character').hasAncestor(ancestor)
+ .filter('family =', 'Stark')
+ .filter('appearances >=', 20);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 6);
+ assert.strictEqual(entities.length, 6);
done();
});
});
it('should filter by ancestor', function(done) {
- var q = ds.createQuery('Character')
- .hasAncestor(ancestor);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character').hasAncestor(ancestor);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 8);
+ assert.strictEqual(entities.length, characters.length);
done();
});
});
it('should filter by key', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('__key__ =', ds.key(['Book', 'GoT', 'Character', 'Rickard']));
- ds.runQuery(q, function(err, entities) {
+ var key = datastore.key(['Book', 'GoT', 'Character', 'Rickard']);
+
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('__key__ =', key);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 1);
+ assert.strictEqual(entities.length, 1);
done();
});
});
it('should order queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .order('appearances');
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .order('appearances');
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities[0].data.name, characters[0].name);
- assert.equal(entities[7].data.name, characters[3].name);
+
+ assert.strictEqual(entities[0].data.name, characters[0].name);
+ assert.strictEqual(entities[7].data.name, characters[3].name);
+
done();
});
});
it('should select projections', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .select(['name', 'family']);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .select(['name', 'family']);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
+
assert.deepEqual(entities[0].data, {
name: 'Arya',
family: 'Stark'
});
+
assert.deepEqual(entities[8].data, {
name: 'Sansa',
family: 'Stark'
});
+
done();
});
});
it('should paginate with offset and limit', function(done) {
- var q = ds.createQuery('Character')
+ var q = datastore.createQuery('Character')
.hasAncestor(ancestor)
.offset(2)
.limit(3)
.order('appearances')
.autoPaginate(false);
- ds.runQuery(q, function(err, entities, secondQuery) {
+ datastore.runQuery(q, function(err, entities, secondQuery) {
assert.ifError(err);
- assert.equal(entities.length, 3);
- assert.equal(entities[0].data.name, 'Robb');
- assert.equal(entities[2].data.name, 'Catelyn');
+ assert.strictEqual(entities.length, 3);
+ assert.strictEqual(entities[0].data.name, 'Robb');
+ assert.strictEqual(entities[2].data.name, 'Catelyn');
- ds.runQuery(secondQuery.offset(0), function(err, secondEntities) {
+ var offsetQuery = secondQuery.offset(0);
+ datastore.runQuery(offsetQuery, function(err, secondEntities) {
assert.ifError(err);
- assert.equal(secondEntities.length, 3);
- assert.equal(secondEntities[0].data.name, 'Sansa');
- assert.equal(secondEntities[2].data.name, 'Arya');
+ assert.strictEqual(secondEntities.length, 3);
+ assert.strictEqual(secondEntities[0].data.name, 'Sansa');
+ assert.strictEqual(secondEntities[2].data.name, 'Arya');
+
done();
});
});
});
it('should resume from a start cursor', function(done) {
- var q = ds.createQuery('Character')
+ var q = datastore.createQuery('Character')
.hasAncestor(ancestor)
.offset(2)
.limit(2)
.order('appearances')
.autoPaginate(false);
- ds.runQuery(q, function(err, entities, nextQuery) {
+ datastore.runQuery(q, function(err, entities, nextQuery) {
assert.ifError(err);
- ds.runQuery(nextQuery.limit(-1), function(err, secondEntities) {
+ datastore.runQuery(nextQuery.limit(-1), function(err, secondEntities) {
assert.ifError(err);
- assert.equal(secondEntities.length, 4);
- assert.equal(secondEntities[0].data.name, 'Catelyn');
- assert.equal(secondEntities[3].data.name, 'Arya');
+
+ assert.strictEqual(secondEntities.length, 4);
+ assert.strictEqual(secondEntities[0].data.name, 'Catelyn');
+ assert.strictEqual(secondEntities[3].data.name, 'Arya');
+
done();
});
});
});
it('should group queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .groupBy('alive');
- ds.runQuery(q, function(err, entities) {
- assert.ifError(err);
- assert.equal(entities.length, 2);
- done();
- });
- });
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .groupBy('appearances');
- after(function(done) {
- ds.delete(keys, function(err) {
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
+ assert.strictEqual(entities.length, characters.length - 1);
done();
});
});
@@ -504,12 +540,12 @@ describe('datastore', function() {
describe('transactions', function() {
it('should run in a transaction', function(done) {
- var key = ds.key(['Company', 'Google']);
+ var key = datastore.key(['Company', 'Google']);
var obj = {
url: 'www.google.com'
};
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.get(key, function(err) {
assert.ifError(err);
@@ -519,22 +555,22 @@ describe('datastore', function() {
}, function(err) {
assert.ifError(err);
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, obj);
- ds.delete(key, done);
+ datastore.delete(key, done);
});
});
});
it('should commit all saves and deletes at the end', function(done) {
- var deleteKey = ds.key(['Company', 'Subway']);
- var key = ds.key(['Company', 'Google']);
- var incompleteKey = ds.key('Company');
+ var deleteKey = datastore.key(['Company', 'Subway']);
+ var key = datastore.key(['Company', 'Google']);
+ var incompleteKey = datastore.key('Company');
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.delete(deleteKey);
t.save([
@@ -553,43 +589,50 @@ describe('datastore', function() {
assert.ifError(err);
// Incomplete key should have been given an ID.
- assert.equal(incompleteKey.path.length, 2);
+ assert.strictEqual(incompleteKey.path.length, 2);
async.parallel([
// The key queued for deletion should have been deleted.
function(done) {
- ds.get(deleteKey, function(err, entity) {
+ datastore.get(deleteKey, function(err, entity) {
assert.ifError(err);
- assert.equal(typeof entity, 'undefined');
+ assert.strictEqual(typeof entity, 'undefined');
done();
});
},
// Data should have been updated on the key.
function(done) {
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
- assert.equal(entity.data.rating, 10);
+ assert.strictEqual(entity.data.rating, 10);
done();
});
}
- ], done);
+ ], function(err) {
+ assert.ifError(err);
+ datastore.delete([key, incompleteKey], done);
+ });
});
});
it('should use the last modification to a key', function(done) {
- var incompleteKey = ds.key('Company');
- var key = ds.key(['Company', 'Google']);
+ var incompleteKey = datastore.key('Company');
+ var key = datastore.key(['Company', 'Google']);
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.save([
{
key: key,
- data: { rating: 10 }
+ data: {
+ rating: 10
+ }
},
{
key: incompleteKey,
- data: { rating: 100 }
+ data: {
+ rating: 100
+ }
}
]);
@@ -600,12 +643,12 @@ describe('datastore', function() {
assert.ifError(err);
// Should not return a result.
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
assert.strictEqual(entity, undefined);
// Incomplete key should have been given an id.
- assert.equal(incompleteKey.path.length, 2);
+ assert.strictEqual(incompleteKey.path.length, 2);
done();
});
});
diff --git a/test/datastore/dataset.js b/test/datastore/dataset.js
deleted file mode 100644
index cbda6413d895..000000000000
--- a/test/datastore/dataset.js
+++ /dev/null
@@ -1,250 +0,0 @@
-/**
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-'use strict';
-
-var assert = require('assert');
-var util = require('../../lib/common/util.js');
-
-describe('Dataset', function() {
- var Dataset;
-
- beforeEach(function() {
- delete require.cache[require.resolve('../../lib/datastore/dataset')];
- Dataset = require('../../lib/datastore/dataset');
- });
-
- describe('instantiation', function() {
- it('should throw if a projectId is not specified', function() {
- assert.throws(function() {
- new Dataset();
- }, /Sorry, we cannot connect/);
- });
-
- it('should set default API connection details', function() {
- var options = { a: 'b', c: 'd', projectId: 'project-id' };
- var mockApiEndpoint = 'http://localhost:8080';
-
- Dataset.determineApiEndpoint_ = function(opts) {
- assert.deepEqual(opts, options);
- return mockApiEndpoint;
- };
-
- var ds = new Dataset(options);
- assert.equal(ds.apiEndpoint, mockApiEndpoint);
- });
- });
-
- describe('key', function() {
- it('should return key scoped by default namespace', function() {
- var ds = new Dataset({ projectId: 'test', namespace: 'my-ns' });
- var key = ds.key(['Company', 1]);
- assert.equal(key.namespace, 'my-ns');
- assert.deepEqual(key.path, ['Company', 1]);
- });
-
- it('should allow namespace specification', function() {
- var ds = new Dataset({ projectId: 'test', namespace: 'my-ns' });
- var key = ds.key({
- namespace: 'custom-ns',
- path: ['Company', 1]
- });
- assert.equal(key.namespace, 'custom-ns');
- assert.deepEqual(key.path, ['Company', 1]);
- });
-
- it('should create incomplete key from string', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key('hello');
- assert.deepEqual(key.path, ['hello']);
- });
-
- it('should create incomplete key from array in obj', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key({
- path: ['world']
- });
- assert.deepEqual(key.path, ['world']);
- });
-
- it('should create incomplete key from array', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key(['Company']);
- assert.deepEqual(key.path, ['Company']);
- });
- });
-
- describe('runInTransaction', function() {
- var ds;
-
- beforeEach(function() {
- ds = new Dataset({ projectId: 'test' });
- });
-
- it('should begin transaction', function(done) {
- ds.createTransaction_ = function() {
- return {
- begin_: function() {
- done();
- }
- };
- };
- ds.runInTransaction();
- });
-
- it('should execute callback with error if one occurred', function(done) {
- var error = new Error('Error.');
- var apiResponse = {};
-
- ds.createTransaction_ = function() {
- return {
- begin_: function(callback) {
- callback(error, apiResponse);
- }
- };
- };
-
- ds.runInTransaction(util.noop, function(err, apiResponse_) {
- assert.strictEqual(err, error);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- });
- });
-
- it('should return transaction object to the callback', function(done) {
- var transaction = {
- begin_: function(callback) {
- callback();
- },
- commit_: util.noop
- };
- ds.createTransaction_ = function() {
- return transaction;
- };
- ds.runInTransaction(function(t) {
- assert.deepEqual(t, transaction);
- done();
- }, assert.ifError);
- });
-
- it('should return correct done function to the callback', function(done) {
- ds.createTransaction_ = function() {
- return {
- begin_: function(callback) {
- callback();
- },
- commit_: function() {
- done();
- }
- };
- };
- ds.runInTransaction(function(t, tDone) {
- tDone();
- }, assert.ifError);
- });
- });
-
- describe('createQuery', function() {
- var ds;
- var dsWithNs;
-
- beforeEach(function() {
- ds = new Dataset({ projectId: 'test' });
- dsWithNs = new Dataset({
- projectId: 'test',
- namespace: 'my-ns'
- });
- });
-
- it('should not include a namespace on a ns-less dataset', function() {
- var query = ds.createQuery('Kind');
- assert.equal(query.namespace, undefined);
- });
-
- it('should scope query to namespace', function() {
- var query = dsWithNs.createQuery('Kind');
- assert.equal(query.namespace, 'my-ns');
- });
-
- it('should allow control over namespace and kinds', function() {
- var queryFromDs = ds.createQuery('my-ns', 'Kind');
- assert.equal(queryFromDs.namespace, 'my-ns');
-
- var queryFromDsWithNs = dsWithNs.createQuery('Kind');
- assert.equal(queryFromDsWithNs.namespace, 'my-ns');
- });
-
- it('should allow removal of namespace', function() {
- var query = dsWithNs.createQuery(null, 'Kind');
- assert.strictEqual(query.namespace, null);
- });
- });
-
- describe('determineApiEndpoint_', function() {
- it('should default to googleapis.com', function() {
- delete process.env.DATASTORE_HOST;
- var expectedApiEndpoint = 'https://www.googleapis.com';
- assert.equal(Dataset.determineApiEndpoint_({}), expectedApiEndpoint);
- });
-
- it('should remove slashes from the apiEndpoint', function() {
- var expectedApiEndpoint = 'http://localhost:8080';
-
- assert.equal(Dataset.determineApiEndpoint_({
- apiEndpoint: expectedApiEndpoint
- }), expectedApiEndpoint);
-
- assert.equal(Dataset.determineApiEndpoint_({
- apiEndpoint: 'http://localhost:8080/'
- }), expectedApiEndpoint);
-
- assert.equal(Dataset.determineApiEndpoint_({
- apiEndpoint: 'http://localhost:8080//'
- }), expectedApiEndpoint);
- });
-
- it('should default to http if protocol is unspecified', function() {
- var apiEndpoint = Dataset.determineApiEndpoint_({
- apiEndpoint: 'localhost:8080'
- });
- assert.equal(apiEndpoint, 'http://localhost:8080');
- });
-
- describe('with DATASTORE_HOST environment variable', function() {
- var DATASTORE_HOST = 'http://localhost:8080';
-
- before(function() {
- process.env.DATASTORE_HOST = DATASTORE_HOST;
- });
-
- after(function() {
- delete process.env.DATASTORE_HOST;
- });
-
- it('should use the DATASTORE_HOST env var', function() {
- assert.equal(Dataset.determineApiEndpoint_({}), DATASTORE_HOST);
- });
-
- it('should favor an explicit apiEndpoint option', function() {
- var expectedApiEndpoint = 'http://apiendpointoverride';
-
- assert.equal(Dataset.determineApiEndpoint_({
- apiEndpoint: expectedApiEndpoint
- }), expectedApiEndpoint);
- });
- });
- });
-});
diff --git a/test/datastore/entity.js b/test/datastore/entity.js
index edb2e3d63a75..48e66d628ead 100644
--- a/test/datastore/entity.js
+++ b/test/datastore/entity.js
@@ -17,426 +17,757 @@
'use strict';
var assert = require('assert');
-var entity = require('../../lib/datastore/entity.js');
-var datastore = require('../../lib/datastore');
-var ByteBuffer = require('bytebuffer');
-
-var entityProto = {
- property: [{
- name: 'linkedTo',
- value: {
- key_value: {
- path_element: [{
- kind: 'Kind',
- name: '123'
- }]
- }
- }
- }, {
- name: 'name',
- value: {
- string_value: 'Some name'
- }
- }, {
- name: 'flagged',
- value: {
- boolean_value: false
- }
- }, {
- name: 'count',
- value: {
- integer_value: 5
- }
- }, {
- name: 'total',
- value: {
- double_value: 7.8
- }
- }, {
- name: 'author',
- value: {
- entity_value: {
- property: [{
- name: 'name',
- value: {
- string_value: 'Burcu Dogan'
- }
- }]
- },
- indexed: false
- }
- }, {
- name: 'list',
- value: {
- list_value: [{
- integer_value: 6
- }, {
- boolean_value: false
- }]
- }
- }]
-};
-
-var queryFilterProto = {
- projection: [],
- kind: [{
- name: 'Kind1'
- }],
- filter: {
- composite_filter: {
- filter: [
- {
- property_filter: {
- property: { name: 'name' },
- operator: 'EQUAL',
- value: { string_value: 'John' }
- }
- },
- {
- property_filter: {
- property: { name: '__key__' },
- operator: 'HAS_ANCESTOR',
- value: {
- key_value: {
- path_element: [{ kind: 'Kind2', name: 'somename' }]
- }
- }
- }
- }
- ],
- operator: 'AND'
- }
- },
- end_cursor: new Buffer('end', 'base64'),
- order: [],
- group_by: []
-};
-
-describe('keyFromKeyProto', function() {
- var proto = {
- partition_id: { namespace: '', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', name: 'Name' }]
- };
-
- var protoH = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', id: '111' }, { kind: 'Kind2', name: 'name' }]
- };
-
- var protoIncomplete = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', id: '111' }, { kind: 'Kind2' }]
- };
-
- var protoInvalid = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind' }, { kind: 'Kind2' }]
- };
-
- it('should handle keys hierarchically', function() {
- var key = entity.keyFromKeyProto(protoH);
- assert.deepEqual(key, new entity.Key({
- namespace: 'Test',
- path: [ 'Kind', 111, 'Kind2', 'name' ]
- }));
+
+var Datastore = require('../../lib/datastore/index.js');
+
+describe('entity', function() {
+ var entity;
+
+ beforeEach(function() {
+ delete require.cache[require.resolve('../../lib/datastore/entity.js')];
+ entity = require('../../lib/datastore/entity.js');
});
- it('should not set namespace if default', function() {
- var key = entity.keyFromKeyProto(proto);
- assert.deepEqual(key, new entity.Key({ path: [ 'Kind', 'Name' ] }));
+ describe('Double', function() {
+ it('should store the value', function() {
+ var value = 8.3;
+
+ var double = new entity.Double(value);
+ assert.strictEqual(double.value, value);
+ });
});
- it('should not inject null into path if no id set', function() {
- var key = entity.keyFromKeyProto(protoIncomplete);
- assert.deepEqual(key, new entity.Key({
- namespace: 'Test',
- path: [ 'Kind', 111, 'Kind2' ]
- }));
+ describe('Int', function() {
+ it('should store the value', function() {
+ var value = 8;
+
+ var int = new entity.Int(value);
+ assert.strictEqual(int.value, value);
+ });
});
- it('should throw if path is invalid', function() {
- assert.throws(function() {
- entity.keyFromKeyProto(protoInvalid);
- }, /Invalid key. Ancestor keys require an id or name./);
+ describe('Key', function() {
+ var key;
+
+ var NAMESPACE = 'namespace';
+ var PATH = ['Kind', 'name'];
+
+ beforeEach(function() {
+ key = new entity.Key({
+ namespace: NAMESPACE,
+ path: PATH
+ });
+ });
+
+ it('should localize the namespace', function() {
+ assert.strictEqual(key.namespace, NAMESPACE);
+ });
+
+ it('should localize the path', function() {
+ assert.strictEqual(key.path, PATH);
+ });
});
-});
-describe('keyToKeyProto', function() {
- it('should handle hierarchical key definitions', function() {
- var key = new entity.Key({ path: [ 'Kind1', 1, 'Kind2', 'name' ] });
- var proto = entity.keyToKeyProto(key);
- assert.strictEqual(proto.partition_id, undefined);
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, 1);
- assert.strictEqual(proto.path_element[0].name, undefined);
- assert.strictEqual(proto.path_element[1].kind, 'Kind2');
- assert.strictEqual(proto.path_element[1].id, undefined);
- assert.strictEqual(proto.path_element[1].name, 'name');
+ describe('decodeValueProto', function() {
+ it('should decode arrays', function() {
+ var expectedValue = [{}];
+
+ var valueProto = {
+ value_type: 'arrayValue',
+ arrayValue: {
+ values: expectedValue
+ }
+ };
+
+ var run = false;
+
+ var decodeValueProto = entity.decodeValueProto;
+ entity.decodeValueProto = function(valueProto) {
+ if (!run) {
+ run = true;
+ return decodeValueProto.apply(null, arguments);
+ }
+
+ assert.strictEqual(valueProto, expectedValue[0]);
+ return valueProto;
+ };
+
+ assert.deepEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode blobs', function() {
+ var expectedValue = new Buffer('Hi');
+
+ var valueProto = {
+ value_type: 'blobValue',
+ blobValue: {
+ toBuffer: function() {
+ return expectedValue;
+ }
+ }
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode doubles', function() {
+ var expectedValue = 8.3;
+
+ var valueProto = {
+ value_type: 'doubleValue',
+ doubleValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode ints', function() {
+ var expectedValue = 8;
+
+ var valueProto = {
+ value_type: 'integerValue',
+ integerValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode entities', function() {
+ var expectedValue = {};
+
+ var valueProto = {
+ value_type: 'entityValue',
+ entityValue: expectedValue
+ };
+
+ entity.entityFromEntityProto = function(entityProto) {
+ assert.strictEqual(entityProto, expectedValue);
+ return expectedValue;
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode keys', function() {
+ var expectedValue = {};
+
+ var valueProto = {
+ value_type: 'keyValue',
+ keyValue: expectedValue
+ };
+
+ entity.keyFromKeyProto = function(keyProto) {
+ assert.strictEqual(keyProto, expectedValue);
+ return expectedValue;
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode timestamps', function() {
+ var seconds = String(Date.now() / 1000);
+ var expectedValue = new Date(parseInt(seconds, 10) * 1000);
+
+ var valueProto = {
+ value_type: 'timestampValue',
+ timestampValue: {
+ seconds: seconds
+ }
+ };
+
+ assert.deepEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should return the value if no conversions are necessary', function() {
+ var expectedValue = false;
+
+ var valueProto = {
+ value_type: 'booleanValue',
+ booleanValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
});
- it('should detect the namespace of the hierarchical keys', function() {
- var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', 1, 'Kind2', 'name' ]
+ describe('encodeValue', function() {
+ it('should encode a boolean', function() {
+ var value = true;
+
+ var expectedValueProto = {
+ booleanValue: value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an int', function() {
+ var value = 8;
+
+ var expectedValueProto = {
+ integerValue: value
+ };
+
+ entity.Int = function(value_) {
+ assert.strictEqual(value_, value);
+ this.value = value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an Int object', function() {
+ var value = new entity.Int(3);
+
+ var expectedValueProto = {
+ integerValue: value.value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a double', function() {
+ var value = 8.3;
+
+ var expectedValueProto = {
+ doubleValue: value
+ };
+
+ entity.Double = function(value_) {
+ assert.strictEqual(value_, value);
+ this.value = value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a Double object', function() {
+ var value = new entity.Double(3);
+
+ var expectedValueProto = {
+ doubleValue: value.value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a date', function() {
+ var value = new Date();
+
+ var expectedValueProto = {
+ timestampValue: {
+ seconds: value.getTime() / 1000,
+ nanos: value.getTime() * 1e6
+ }
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a string', function() {
+ var value = 'Hi';
+
+ var expectedValueProto = {
+ stringValue: value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a buffer', function() {
+ var value = new Buffer('Hi');
+
+ var expectedValueProto = {
+ blobValue: value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an array', function() {
+ var value = [{}];
+
+ var expectedValueProto = {
+ arrayValue: {
+ values: value
+ }
+ };
+
+ var run = false;
+
+ var encodeValue = entity.encodeValue;
+ entity.encodeValue = function(value_) {
+ if (!run) {
+ run = true;
+ return encodeValue.apply(null, arguments);
+ }
+
+ assert.strictEqual(value_, value[0]);
+ return value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a Key', function() {
+ var value = new entity.Key({
+ namespace: 'ns',
+ path: ['Kind', 1]
});
- var proto = entity.keyToKeyProto(key);
- assert.strictEqual(proto.partition_id.namespace, 'Namespace');
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, 1);
- assert.strictEqual(proto.path_element[0].name, undefined);
- assert.strictEqual(proto.path_element[1].kind, 'Kind2');
- assert.strictEqual(proto.path_element[1].id, undefined);
- assert.strictEqual(proto.path_element[1].name, 'name');
- });
- it('should handle incomplete keys with & without namespaces', function() {
- var key = new entity.Key({ path: [ 'Kind1' ] });
- var keyWithNS = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1' ]
+ var expectedValueProto = {
+ keyValue: value
+ };
+
+ entity.keyToKeyProto = function(key) {
+ assert.strictEqual(key, value);
+ return value;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
});
- var keyWithNumericID = new entity.Key({
- path: [ 'Kind1', 234 ]
+
+ it('should encode an object', function() {
+ var value = {
+ key: 'value'
+ };
+
+ var expectedValueProto = {
+ entityValue: {
+ properties: [
+ {
+ name: 'key',
+ value: value.key
+ }
+ ]
+ }
+ };
+
+ var run = false;
+
+ var encodeValue = entity.encodeValue;
+ entity.encodeValue = function(value_) {
+ if (!run) {
+ run = true;
+ return encodeValue.apply(null, arguments);
+ }
+
+ assert.strictEqual(value_, value.key);
+ return value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should throw if an invalid value was provided', function() {
+ assert.throws(function() {
+ entity.encodeValue({});
+ }, /Unsupported field value/);
});
- var keyWithStringID = new entity.Key({
- path: [ 'Kind1', 'StringId' ]
+ });
+
+ describe('entityFromEntityProto', function() {
+ it('should convert entity proto to entity', function() {
+ var expectedEntity = {
+ name: 'Stephen'
+ };
+
+ var entityProto = {
+ properties: {
+ map: {
+ name: {
+ value: {
+ value_type: 'stringValue',
+ stringValue: expectedEntity.name
+ }
+ }
+ }
+ }
+ };
+
+ assert.deepEqual(
+ entity.entityFromEntityProto(entityProto),
+ expectedEntity
+ );
});
+ });
- var proto = entity.keyToKeyProto(key);
- var protoWithNS = entity.keyToKeyProto(keyWithNS);
- var protoWithNumericID = entity.keyToKeyProto(keyWithNumericID);
- var protoWithStringID = entity.keyToKeyProto(keyWithStringID);
+ describe('entityToEntityProto', function() {
+ it('should format an entity', function() {
+ var value = 'Stephen';
- assert.strictEqual(proto.partition_id, undefined);
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, undefined);
- assert.strictEqual(proto.path_element[0].name, undefined);
+ var entityObject = {
+ name: value
+ };
- assert.strictEqual(protoWithNS.partition_id.namespace, 'Namespace');
- assert.strictEqual(protoWithNS.path_element[0].kind, 'Kind1');
- assert.strictEqual(protoWithNS.path_element[0].id, undefined);
- assert.strictEqual(protoWithNS.path_element[0].name, undefined);
+ var expectedEntityProto = {
+ key: null,
+ properties: entityObject
+ };
- assert.strictEqual(protoWithNumericID.path_element[0].id, 234);
- assert.strictEqual(protoWithStringID.path_element[0].name, 'StringId');
+ entity.encodeValue = function(value_) {
+ assert.strictEqual(value_, value);
+ return value;
+ };
+
+ assert.deepEqual(
+ entity.entityToEntityProto(entityObject),
+ expectedEntityProto
+ );
+ });
});
- it('should throw if key contains 0 items', function() {
- assert.throws(function() {
- var key = new entity.Key({ path: [] });
- entity.keyToKeyProto(key);
- }, /A key should contain at least a kind/);
+ describe('formatArray', function() {
+ it('should convert protos to key/data entity array', function() {
+ var key = {};
+
+ var entityProto = {
+ key: key
+ };
+
+ var results = [
+ {
+ entity: entityProto
+ }
+ ];
+
+ var expectedResults = [
+ {
+ key: key,
+ data: entityProto
+ }
+ ];
+
+ entity.keyFromKeyProto = function(key_) {
+ assert.strictEqual(key_, key);
+ return key;
+ };
+
+ entity.entityFromEntityProto = function(entityProto_) {
+ assert.strictEqual(entityProto_, entityProto);
+ return entityProto;
+ };
+
+ assert.deepEqual(entity.formatArray(results), expectedResults);
+ });
});
- it('should throw if key path contains null ids', function() {
- assert.throws(function() {
+ describe('isKeyComplete', function() {
+ it('should convert key to key proto', function(done) {
var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', null, 'Company' ]
+ path: ['Kind', 123]
});
- entity.keyToKeyProto(key);
- }, /Invalid key. Ancestor keys require an id or name./);
- });
- it('should not throw if last key path item is null', function() {
- assert.doesNotThrow(function() {
+ entity.keyToKeyProto = function(key_) {
+ assert.strictEqual(key_, key);
+ setImmediate(done);
+ return key;
+ };
+
+ entity.isKeyComplete(key);
+ });
+
+ it('should return true if key has id', function() {
var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', 123, 'Company', null ]
+ path: ['Kind', 123]
});
- entity.keyToKeyProto(key);
+
+ assert.strictEqual(entity.isKeyComplete(key), true);
});
- });
-});
-describe('isKeyComplete', function() {
- it('should ret true if kind and an identifier have !0 vals', function() {
- [
- {
- key: new entity.Key({ path: [ 'Kind1' ] }),
- expected: false
- },
- {
- key: new entity.Key({ path: [ 'Kind1', 3 ] }),
- expected: true
- },
- {
- key: new entity.Key({ namespace: 'NS', path: [ 'Kind1' ] }),
- expected: false
- },
- {
- key: new entity.Key({ namespace: 'NS', path: [ 'Kind1', 'name' ] }),
- expected: true
- }
- ].forEach(function(test) {
- assert.strictEqual(entity.isKeyComplete(test.key), test.expected);
+ it('should return true if key has name', function() {
+ var key = new entity.Key({
+ path: ['Kind', 'name']
+ });
+
+ assert.strictEqual(entity.isKeyComplete(key), true);
});
- });
- it('should return false if there is no kind', function() {
- var key = new entity.Key({ path: [ '' ] });
+ it('should return false if key does not have name or ID', function() {
+ var key = new entity.Key({
+ path: ['Kind']
+ });
- assert.strictEqual(entity.isKeyComplete(key), false);
+ assert.strictEqual(entity.isKeyComplete(key), false);
+ });
});
-});
-describe('entityFromEntityProto', function() {
- it('should support bool, int, double, str, entity & list values', function() {
- var obj = entity.entityFromEntityProto(entityProto);
- assert.deepEqual(obj.linkedTo, new entity.Key({ path: [ 'Kind', '123' ]}));
- assert.strictEqual(obj.name, 'Some name');
- assert.strictEqual(obj.flagged, false);
- assert.strictEqual(obj.count, 5);
- assert.strictEqual(obj.total, 7.8);
- assert.strictEqual(obj.author.name, 'Burcu Dogan');
- assert.strictEqual(obj.list[0], 6);
- assert.strictEqual(obj.list[1], false);
- });
-});
+ describe('keyFromKeyProto', function() {
+ var NAMESPACE = 'Namespace';
-describe('entityToEntityProto', function() {
- it('should format an entity', function() {
- var val = entity.entityToEntityProto({
- name: 'name'
- });
- var expected = {
- key: null,
- property: [
+ var keyProto = {
+ partitionId: {
+ namespaceId: NAMESPACE,
+ projectId: 'project-id'
+ },
+ path: [
{
- name: 'name',
- value: {
- string_value: 'name'
- }
+ kind: 'Kind',
+ id: '111'
+ },
+ {
+ kind: 'Kind2',
+ name: 'name'
}
]
};
- assert.deepEqual(val, expected);
- });
-});
-describe('queryToQueryProto', function() {
- it('should support filters and ancestory filtering', function() {
- var ds = datastore.dataset({ projectId: 'project-id' });
- var q = ds.createQuery('Kind1')
- .filter('name =', 'John')
- .end('end')
- .hasAncestor(new entity.Key({ path: [ 'Kind2', 'somename' ] }));
- var proto = entity.queryToQueryProto(q);
- assert.deepEqual(proto, queryFilterProto);
- });
-});
+ it('should set the namespace', function(done) {
+ entity.Key = function(keyOptions) {
+ assert.strictEqual(keyOptions.namespaceId, NAMESPACE);
+ done();
+ };
-describe('propertyToValue', function() {
- it('should translate a buffer', function() {
- var buffer = new Buffer('010159406940');
- var property = {
- blob_value: ByteBuffer.wrap(buffer)
- };
- var returnedbuffer = entity.propertyToValue(property);
- assert.deepEqual(buffer, returnedbuffer);
- });
-});
+ entity.keyFromKeyProto(keyProto);
+ });
-describe('valueToProperty', function() {
- it('should translate a boolean', function() {
- var val = entity.valueToProperty(true);
- assert.deepEqual(val, {
- boolean_value: true
+ it('should create a proper Key', function(done) {
+ entity.Key = function(keyOptions) {
+ assert.deepEqual(keyOptions, {
+ namespaceId: NAMESPACE,
+ path: [
+ 'Kind',
+ 111,
+ 'Kind2',
+ 'name'
+ ]
+ });
+
+ done();
+ };
+
+ entity.keyFromKeyProto(keyProto);
});
- });
- it('should translate an int', function() {
- var val1 = entity.valueToProperty(new entity.Int(3));
- var val2 = entity.valueToProperty(3);
- var expected = { integer_value: 3 };
- assert.deepEqual(val1, expected);
- assert.deepEqual(val2, expected);
- });
+ it('should return the created Key', function() {
+ var expectedValue = {};
- it('should translate a double', function() {
- var val1 = entity.valueToProperty(new entity.Double(3.1));
- var val2 = entity.valueToProperty(3.1);
- var expected = { double_value: 3.1 };
- assert.deepEqual(val1, expected);
- assert.deepEqual(val2, expected);
- });
+ entity.Key = function() {
+ return expectedValue;
+ };
- it('should translate a date', function() {
- var date = new Date();
- var val = entity.valueToProperty(date);
- var expected = {
- timestamp_microseconds_value: date.getTime() * 1000
- };
- assert.deepEqual(val, expected);
- });
+ assert.strictEqual(entity.keyFromKeyProto(keyProto), expectedValue);
+ });
- it('should translate a string', function() {
- var val = entity.valueToProperty('Hi');
- var expected = {
- string_value: 'Hi'
- };
- assert.deepEqual(val, expected);
- });
+ it('should throw if path is invalid', function(done) {
+ var keyProtoInvalid = {
+ partitionId: {
+ namespaceId: 'Namespace',
+ projectId: 'project-id'
+ },
+ path: [
+ {
+ kind: 'Kind'
+ },
+ {
+ kind: 'Kind2'
+ }
+ ]
+ };
- it('should translate a buffer', function() {
- var buffer = new Buffer('Hi');
- var val = entity.valueToProperty(buffer);
- var expected = {
- blob_value: buffer
- };
- assert.deepEqual(val, expected);
+ try {
+ entity.keyFromKeyProto(keyProtoInvalid);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'Ancestor keys require an id or name.');
+ done();
+ }
+ });
});
- it('should translate an array', function() {
- var array = [1, '2', true];
- var val = entity.valueToProperty(array);
- var expected = {
- list_value: [
- { integer_value: 1 },
- { string_value: '2' },
- { boolean_value: true }
- ]
- };
- assert.deepEqual(val, expected);
- });
+ describe('keyToKeyProto', function() {
+ it('should handle hierarchical key definitions', function() {
+ var key = new entity.Key({
+ path: ['Kind1', 1, 'Kind2', 'name']
+ });
+
+ var keyProto = entity.keyToKeyProto(key);
+
+ assert.strictEqual(keyProto.partitionId, undefined);
- it('should translate a Key', function() {
- var key = new entity.Key({
- namespace: 'ns',
- path: ['Kind', 3]
+ assert.strictEqual(keyProto.path[0].kind, 'Kind1');
+ assert.strictEqual(keyProto.path[0].id, 1);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProto.path[1].kind, 'Kind2');
+ assert.strictEqual(keyProto.path[1].id, undefined);
+ assert.strictEqual(keyProto.path[1].name, 'name');
+ });
+
+ it('should detect the namespace of the hierarchical keys', function() {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', 1, 'Kind2', 'name']
+ });
+
+ var keyProto = entity.keyToKeyProto(key);
+
+ assert.strictEqual(keyProto.partitionId.namespaceId, 'Namespace');
+
+ assert.strictEqual(keyProto.path[0].kind, 'Kind1');
+ assert.strictEqual(keyProto.path[0].id, 1);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProto.path[1].kind, 'Kind2');
+ assert.strictEqual(keyProto.path[1].id, undefined);
+ assert.strictEqual(keyProto.path[1].name, 'name');
+ });
+
+ it('should handle incomplete keys with & without namespaces', function() {
+ var incompleteKey = new entity.Key({
+ path: ['Kind']
+ });
+
+ var incompleteKeyWithNs = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind']
+ });
+
+ var keyProto = entity.keyToKeyProto(incompleteKey);
+ var keyProtoWithNs = entity.keyToKeyProto(incompleteKeyWithNs);
+
+ assert.strictEqual(keyProto.partitionId, undefined);
+ assert.strictEqual(keyProto.path[0].kind, 'Kind');
+ assert.strictEqual(keyProto.path[0].id, undefined);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProtoWithNs.partitionId.namespaceId, 'Namespace');
+ assert.strictEqual(keyProtoWithNs.path[0].kind, 'Kind');
+ assert.strictEqual(keyProtoWithNs.path[0].id, undefined);
+ assert.strictEqual(keyProtoWithNs.path[0].name, undefined);
+ });
+
+ it('should throw if key contains 0 items', function(done) {
+ var key = new entity.Key({
+ path: []
+ });
+
+ try {
+ entity.keyToKeyProto(key);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'A key should contain at least a kind.');
+ done();
+ }
+ });
+
+ it('should throw if key path contains null ids', function(done) {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', null, 'Company']
+ });
+
+ try {
+ entity.keyToKeyProto(key);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'Ancestor keys require an id or name.');
+ done();
+ }
});
- var val = entity.valueToProperty(key);
- var expected = {
- key_value: entity.keyToKeyProto(key)
- };
- assert.deepEqual(val, expected);
- });
- describe('objects', function() {
- it('should translate an object', function() {
- var val = entity.valueToProperty({
- name: 'value'
+ it('should not throw if key is incomplete', function() {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', 123, 'Company', null]
});
- var expected = {
- entity_value: {
- property: [
+
+ assert.doesNotThrow(function() {
+ entity.keyToKeyProto(key);
+ });
+ });
+ });
+
+ describe('queryToQueryProto', function() {
+ var queryProto = {
+ distinctOn: [
+ {
+ name: 'name'
+ }
+ ],
+ kind: [
+ {
+ name: 'Kind1'
+ }
+ ],
+ order: [
+ {
+ property: {
+ name: 'name'
+ },
+ direction: 'ASCENDING'
+ }
+ ],
+ projection: [
+ {
+ property: {
+ name: 'name'
+ }
+ }
+ ],
+ endCursor: new Buffer('end', 'base64'),
+ limit: {
+ value: 1
+ },
+ offset: 1,
+ startCursor: new Buffer('start', 'base64'),
+ filter: {
+ compositeFilter: {
+ filters: [
{
- name: 'name',
- value: {
- string_value: 'value',
+ propertyFilter: {
+ property: {
+ name: 'name'
+ },
+ op: 'EQUAL',
+ value: {
+ stringValue: 'John'
+ }
+ }
+ },
+ {
+ propertyFilter: {
+ property: {
+ name: '__key__'
+ },
+ op: 'HAS_ANCESTOR',
+ value: {
+ keyValue: {
+ path: [
+ {
+ kind: 'Kind2',
+ name: 'somename'
+ }
+ ]
+ }
+ }
}
}
- ]
- },
- indexed: false
- };
- assert.deepEqual(val, expected);
- });
+ ],
+ op: 'AND'
+ }
+ }
+ };
- it('should not translate a key-less object', function() {
- assert.throws(function() {
- entity.valueToProperty({});
- }, /Unsupported field value/);
+ it('should support all configurations of a query', function() {
+ var ancestorKey = new entity.Key({
+ path: ['Kind2', 'somename']
+ });
+
+ var ds = new Datastore({ projectId: 'project-id' });
+
+ var query = ds.createQuery('Kind1')
+ .filter('name =', 'John')
+ .start('start')
+ .end('end')
+ .groupBy(['name'])
+ .order('+name')
+ .select('name')
+ .limit(1)
+ .offset(1)
+ .hasAncestor(ancestorKey);
+
+ assert.deepEqual(entity.queryToQueryProto(query), queryProto);
});
});
});
diff --git a/test/datastore/index.js b/test/datastore/index.js
index be9a0541a000..e63fa732a9f9 100644
--- a/test/datastore/index.js
+++ b/test/datastore/index.js
@@ -30,8 +30,10 @@ var entity = {
var assert = require('assert');
var mockery = require('mockery');
+var util = require('../../lib/common/util.js');
+
describe('Datastore', function() {
- var datastore;
+ var Datastore;
before(function() {
mockery.registerMock('./entity', entity);
@@ -39,7 +41,11 @@ describe('Datastore', function() {
useCleanCache: true,
warnOnUnregistered: false
});
- datastore = require('../../lib/datastore/index.js');
+ });
+
+ beforeEach(function() {
+ delete require.cache[require.resolve('../../lib/datastore/index.js')];
+ Datastore = require('../../lib/datastore/index.js');
});
after(function() {
@@ -47,19 +53,249 @@ describe('Datastore', function() {
mockery.disable();
});
- it('should expose Dataset class', function() {
- assert.equal(typeof datastore.dataset, 'function');
+ describe('instantiation', function() {
+ it('should throw if a projectId is not specified', function() {
+ assert.throws(function() {
+ new Datastore();
+ }, /Sorry, we cannot connect/);
+ });
+
+ it('should set default API connection details', function() {
+ var options = { a: 'b', c: 'd', projectId: 'project-id' };
+ var mockApiEndpoint = 'http://localhost:8080';
+
+ Datastore.determineApiEndpoint_ = function(opts) {
+ assert.deepEqual(opts, options);
+ return mockApiEndpoint;
+ };
+
+ var ds = new Datastore(options);
+ assert.strictEqual(ds.apiEndpoint, mockApiEndpoint);
+ });
});
- it('should expose Int builder', function() {
- var anInt = 7;
- datastore.int(anInt);
- assert.equal(entity.intCalledWith, anInt);
+ describe('double', function() {
+ it('should expose Double builder', function() {
+ var aDouble = 7.0;
+ Datastore.double(aDouble);
+ assert.strictEqual(entity.doubleCalledWith, aDouble);
+ });
});
- it('should expose Double builder', function() {
- var aDouble = 7.0;
- datastore.double(aDouble);
- assert.equal(entity.doubleCalledWith, aDouble);
+ describe('int', function() {
+ it('should expose Int builder', function() {
+ var anInt = 7;
+ Datastore.int(anInt);
+ assert.strictEqual(entity.intCalledWith, anInt);
+ });
+ });
+
+ describe('determineApiEndpoint_', function() {
+ it('should default to googleapis.com', function() {
+ delete process.env.DATASTORE_HOST;
+
+ var expectedApiEndpoint = 'https://www.googleapis.com';
+ var actualApiEndpoint = Datastore.determineApiEndpoint_({});
+
+ assert.strictEqual(actualApiEndpoint, expectedApiEndpoint);
+ });
+
+ it('should remove slashes from the apiEndpoint', function() {
+ var expectedApiEndpoint = 'http://localhost:8080';
+
+ assert.strictEqual(Datastore.determineApiEndpoint_({
+ apiEndpoint: expectedApiEndpoint
+ }), expectedApiEndpoint);
+
+ assert.strictEqual(Datastore.determineApiEndpoint_({
+ apiEndpoint: 'http://localhost:8080/'
+ }), expectedApiEndpoint);
+
+ assert.strictEqual(Datastore.determineApiEndpoint_({
+ apiEndpoint: 'http://localhost:8080//'
+ }), expectedApiEndpoint);
+ });
+
+ it('should default to http if protocol is unspecified', function() {
+ var apiEndpoint = Datastore.determineApiEndpoint_({
+ apiEndpoint: 'localhost:8080'
+ });
+
+ assert.strictEqual(apiEndpoint, 'http://localhost:8080');
+ });
+
+ describe('with DATASTORE_HOST environment variable', function() {
+ var DATASTORE_HOST = 'http://localhost:8080';
+
+ before(function() {
+ process.env.DATASTORE_HOST = DATASTORE_HOST;
+ });
+
+ after(function() {
+ delete process.env.DATASTORE_HOST;
+ });
+
+ it('should use the DATASTORE_HOST env var', function() {
+ assert.strictEqual(Datastore.determineApiEndpoint_({}), DATASTORE_HOST);
+ });
+
+ it('should favor an explicit apiEndpoint option', function() {
+ var expectedApiEndpoint = 'http://apiendpointoverride';
+
+ assert.strictEqual(Datastore.determineApiEndpoint_({
+ apiEndpoint: expectedApiEndpoint
+ }), expectedApiEndpoint);
+ });
+ });
+ });
+
+ describe('createQuery', function() {
+ var ds;
+ var dsWithNs;
+
+ beforeEach(function() {
+ ds = new Datastore({ projectId: 'test' });
+
+ dsWithNs = new Datastore({
+ projectId: 'test',
+ namespace: 'my-ns'
+ });
+ });
+
+ it('should not include a namespace on a ns-less dataset', function() {
+ var query = ds.createQuery('Kind');
+ assert.strictEqual(query.namespace, undefined);
+ });
+
+ it('should scope query to namespace', function() {
+ var query = dsWithNs.createQuery('Kind');
+ assert.strictEqual(query.namespace, 'my-ns');
+ });
+
+ it('should allow control over namespace and kinds', function() {
+ var queryFromDs = ds.createQuery('my-ns', 'Kind');
+ assert.strictEqual(queryFromDs.namespace, 'my-ns');
+
+ var queryFromDsWithNs = dsWithNs.createQuery('Kind');
+ assert.strictEqual(queryFromDsWithNs.namespace, 'my-ns');
+ });
+
+ it('should allow removal of namespace', function() {
+ var query = dsWithNs.createQuery(null, 'Kind');
+ assert.strictEqual(query.namespace, null);
+ });
+ });
+
+ describe('key', function() {
+ it('should return key scoped by default namespace', function() {
+ var ds = new Datastore({ projectId: 'test', namespace: 'my-ns' });
+ var key = ds.key(['Company', 1]);
+ assert.strictEqual(key.namespace, 'my-ns');
+ assert.deepEqual(key.path, ['Company', 1]);
+ });
+
+ it('should allow namespace specification', function() {
+ var ds = new Datastore({ projectId: 'test', namespace: 'my-ns' });
+ var key = ds.key({
+ namespace: 'custom-ns',
+ path: ['Company', 1]
+ });
+ assert.strictEqual(key.namespace, 'custom-ns');
+ assert.deepEqual(key.path, ['Company', 1]);
+ });
+
+ it('should create incomplete key from string', function() {
+ var ds = new Datastore({ projectId: 'test' });
+ var key = ds.key('hello');
+ assert.deepEqual(key.path, ['hello']);
+ });
+
+ it('should create incomplete key from array in obj', function() {
+ var ds = new Datastore({ projectId: 'test' });
+ var key = ds.key({
+ path: ['world']
+ });
+ assert.deepEqual(key.path, ['world']);
+ });
+
+ it('should create incomplete key from array', function() {
+ var ds = new Datastore({ projectId: 'test' });
+ var key = ds.key(['Company']);
+ assert.deepEqual(key.path, ['Company']);
+ });
+ });
+
+ describe('runInTransaction', function() {
+ var ds;
+
+ beforeEach(function() {
+ ds = new Datastore({ projectId: 'test' });
+ });
+
+ it('should begin transaction', function(done) {
+ ds.createTransaction_ = function() {
+ return {
+ begin_: function() {
+ done();
+ }
+ };
+ };
+
+ ds.runInTransaction();
+ });
+
+ it('should execute callback with error if one occurred', function(done) {
+ var error = new Error('Error.');
+ var apiResponse = {};
+
+ ds.createTransaction_ = function() {
+ return {
+ begin_: function(callback) {
+ callback(error, apiResponse);
+ }
+ };
+ };
+
+ ds.runInTransaction(util.noop, function(err, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ });
+ });
+
+ it('should return transaction object to the callback', function(done) {
+ var transaction = {
+ begin_: function(callback) {
+ callback();
+ },
+ commit_: util.noop
+ };
+
+ ds.createTransaction_ = function() {
+ return transaction;
+ };
+
+ ds.runInTransaction(function(t) {
+ assert.deepEqual(t, transaction);
+ done();
+ }, assert.ifError);
+ });
+
+ it('should return correct done function to the callback', function(done) {
+ ds.createTransaction_ = function() {
+ return {
+ begin_: function(callback) {
+ callback();
+ },
+ commit_: function() {
+ done();
+ }
+ };
+ };
+
+ ds.runInTransaction(function(t, tDone) {
+ tDone();
+ }, assert.ifError);
+ });
});
});
diff --git a/test/datastore/request.js b/test/datastore/request.js
index 88786e9d5a8f..bf9e38dcbc74 100644
--- a/test/datastore/request.js
+++ b/test/datastore/request.js
@@ -25,7 +25,7 @@ var format = require('string-format-obj');
var is = require('is');
var mockery = require('mockery');
var mockRespGet = require('../testdata/response_get.json');
-var pb = require('../../lib/datastore/pb.js');
+var pb = require('../../lib/datastore/proto.js');
var Query = require('../../lib/datastore/query.js');
var requestModule = require('request');
var stream = require('stream');