diff --git a/.gitignore b/.gitignore
index fc141b1ca80b..c69d02bb0e65 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,3 +53,4 @@ scripts/pylintrc_reduced
# Directories used for creating generated PB2 files
generated_python/
cloud-bigtable-client/
+googleapis-pb/
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 66fc46d3fb98..c2b509bfc2ff 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -162,8 +162,6 @@ Running System Tests
- ``GCLOUD_TESTS_PROJECT_ID``: Developers Console project ID (e.g.
bamboo-shift-455).
- - ``GCLOUD_TESTS_DATASET_ID``: The name of the dataset your tests connect to.
- This is typically the same as ``GCLOUD_TESTS_PROJECT_ID``.
- ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
see ``system_tests/app_credentials.json.sample`` as an example. Such a file
can be downloaded directly from the developer's console by clicking
@@ -195,7 +193,7 @@ Running System Tests
# Create the indexes
$ gcloud preview datastore create-indexes system_tests/data/index.yaml \
- > --project=$GCLOUD_TESTS_DATASET_ID
+ > --project=$GCLOUD_TESTS_PROJECT_ID
# Restore your environment to its previous state.
$ unset CLOUDSDK_PYTHON_SITEPACKAGES
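As a quick sanity check before kicking off the system tests, something along these lines confirms the two remaining variables from the list above are actually exported (a Python sketch, not part of the test harness):

    import os

    # Both variables are described in the "Running System Tests" list above.
    for name in ('GCLOUD_TESTS_PROJECT_ID', 'GOOGLE_APPLICATION_CREDENTIALS'):
        if not os.getenv(name):
            raise EnvironmentError(
                '%s must be set before running the system tests' % name)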
diff --git a/Makefile b/Makefile
index 64040005d325..3e7e63e6e26a 100644
--- a/Makefile
+++ b/Makefile
@@ -1,8 +1,10 @@
GENERATED_DIR=$(shell pwd)/generated_python
-FINAL_DIR=$(shell pwd)/gcloud/bigtable/_generated
+BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/_generated
+DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
GRPC_PLUGIN=grpc_python_plugin
PROTOC_CMD=protoc
-PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
+BIGTABLE_PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
+GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb
help:
@echo 'Makefile for gcloud-python Bigtable protos '
@@ -12,42 +14,58 @@ help:
@echo ' make clean Clean generated files '
generate:
- [ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client
+ # Retrieve git repos that have our *.proto files.
+ [ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
cd cloud-bigtable-client && git pull origin master
+ [ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
+ cd googleapis-pb && git pull origin master
+ # Make the directory where our *_pb2.py files will go.
mkdir -p $(GENERATED_DIR)
# Generate all *_pb2.py files that require gRPC.
$(PROTOC_CMD) \
- --proto_path=$(PROTOS_DIR) \
+ --proto_path=$(BIGTABLE_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
--plugin=protoc-gen-grpc=$(GRPC_PLUGIN) \
--grpc_out=$(GENERATED_DIR) \
- $(PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
- $(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
- $(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
# Generate all *_pb2.py files that do not require gRPC.
$(PROTOC_CMD) \
- --proto_path=$(PROTOS_DIR) \
+ --proto_path=$(BIGTABLE_PROTOS_DIR) \
+ --proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
- $(PROTOS_DIR)/google/bigtable/v1/bigtable_data.proto \
- $(PROTOS_DIR)/google/bigtable/v1/bigtable_service_messages.proto \
- $(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto \
- $(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
- $(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
- $(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_data.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service_messages.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
+ $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto \
+ $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
+ $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
+ $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
# Move the newly generated *_pb2.py files into our library.
- mv $(GENERATED_DIR)/google/bigtable/v1/* $(FINAL_DIR)
- mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(FINAL_DIR)
- mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(FINAL_DIR)
+ mv $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
+ mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
+ mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
+ mv $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
# Remove all existing *.proto files before we replace
- rm -f $(FINAL_DIR)/*.proto
+ rm -f $(BIGTABLE_DIR)/*.proto
+ rm -f $(DATASTORE_DIR)/*.proto
# Copy over the *.proto files into our library.
- cp $(PROTOS_DIR)/google/bigtable/v1/*.proto $(FINAL_DIR)
- cp $(PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(FINAL_DIR)
- cp $(PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(FINAL_DIR)
- cp $(PROTOS_DIR)/google/longrunning/operations.proto $(FINAL_DIR)
+ cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/*.proto $(BIGTABLE_DIR)
+ cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(BIGTABLE_DIR)
+ cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(BIGTABLE_DIR)
+ cp $(BIGTABLE_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
+ cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
# Rename all *.proto files in our library with an
# underscore and remove executable bit.
- cd $(FINAL_DIR) && \
+ cd $(BIGTABLE_DIR) && \
+ for filename in *.proto; do \
+ chmod -x $$filename ; \
+ mv $$filename _$$filename ; \
+ done
+ cd $(DATASTORE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
@@ -56,6 +74,9 @@ generate:
# non-gRPC parts so that the protos from `googleapis-common-protos`
# can be used without gRPC.
python scripts/make_operations_grpc.py
+ # Separate the gRPC parts of the datastore service from the
+ # non-gRPC parts so that the protos can be used without gRPC.
+ python scripts/make_datastore_grpc.py
# Rewrite the imports in the generated *_pb2.py files.
python scripts/rewrite_imports.py
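Once ``make generate`` finishes, protoc's usual naming means the three datastore protos above should land as ``datastore_pb2.py``, ``entity_pb2.py`` and ``query_pb2.py`` in ``gcloud/datastore/_generated/``. A minimal smoke test like the following can confirm the modules import cleanly after ``scripts/rewrite_imports.py`` has rewritten their cross-module imports; the import paths are assumptions inferred from the Makefile targets above rather than anything this diff states outright:

    # Assumed module locations; generated file names follow protoc's
    # <proto name>_pb2.py convention for the three protos listed above.
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore._generated import query_pb2

    # Instantiating one message per module exercises the rewritten
    # cross-module imports without requiring gRPC to be installed.
    for msg in (datastore_pb2.LookupRequest(),
                entity_pb2.Entity(),
                query_pb2.Query()):
        print(type(msg).DESCRIPTOR.full_name)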
diff --git a/README.rst b/README.rst
index 0b2e42897dd2..1dab778713e6 100644
--- a/README.rst
+++ b/README.rst
@@ -70,7 +70,7 @@ writes, strong consistency for reads and ancestor queries, and eventual
consistency for all other queries.
.. _Cloud Datastore: https://cloud.google.com/datastore/docs
-.. _Datastore API docs: https://cloud.google.com/datastore/docs/apis/v1beta2/
+.. _Datastore API docs: https://cloud.google.com/datastore/docs/apis/v1beta3/
See the ``gcloud-python`` API `datastore documentation`_ to learn how to
interact with the Cloud Datastore using this Client Library.
diff --git a/gcloud/datastore/_datastore_v1.proto b/gcloud/datastore/_datastore_v1.proto
deleted file mode 100644
index 3d562709ddef..000000000000
--- a/gcloud/datastore/_datastore_v1.proto
+++ /dev/null
@@ -1,594 +0,0 @@
-// Copyright 2013 Google Inc. All Rights Reserved.
-//
-// The datastore v1 service proto definitions
-
-syntax = "proto2";
-
-package api.services.datastore;
-option java_package = "com.google.api.services.datastore";
-
-
-// An identifier for a particular subset of entities.
-//
-// Entities are partitioned into various subsets, each used by different
-// datasets and different namespaces within a dataset and so forth.
-//
-// All input partition IDs are normalized before use.
-// A partition ID is normalized as follows:
-// If the partition ID is unset or is set to an empty partition ID, replace it
-// with the context partition ID.
-// Otherwise, if the partition ID has no dataset ID, assign it the context
-// partition ID's dataset ID.
-// Unless otherwise documented, the context partition ID has the dataset ID set
-// to the context dataset ID and no other partition dimension set.
-//
-// A partition ID is empty if all of its fields are unset.
-//
-// Partition dimension:
-// A dimension may be unset.
-// A dimension's value must never be "".
-// A dimension's value must match [A-Za-z\d\.\-_]{1,100}
-// If the value of any dimension matches regex "__.*__",
-// the partition is reserved/read-only.
-// A reserved/read-only partition ID is forbidden in certain documented contexts.
-//
-// Dataset ID:
-// A dataset id's value must never be "".
-// A dataset id's value must match
-// ([a-z\d\-]{1,100}~)?([a-z\d][a-z\d\-\.]{0,99}:)?([a-z\d][a-z\d\-]{0,99}
-message PartitionId {
- // The dataset ID.
- optional string dataset_id = 3;
- // The namespace.
- optional string namespace = 4;
-}
-
-// A unique identifier for an entity.
-// If a key's partition id or any of its path kinds or names are
-// reserved/read-only, the key is reserved/read-only.
-// A reserved/read-only key is forbidden in certain documented contexts.
-message Key {
- // Entities are partitioned into subsets, currently identified by a dataset
- // (usually implicitly specified by the project) and namespace ID.
- // Queries are scoped to a single partition.
- optional PartitionId partition_id = 1;
-
- // A (kind, ID/name) pair used to construct a key path.
- //
- // At most one of name or ID may be set.
- // If either is set, the element is complete.
- // If neither is set, the element is incomplete.
- message PathElement {
- // The kind of the entity.
- // A kind matching regex "__.*__" is reserved/read-only.
- // A kind must not contain more than 500 characters.
- // Cannot be "".
- required string kind = 1;
- // The ID of the entity.
- // Never equal to zero. Values less than zero are discouraged and will not
- // be supported in the future.
- optional int64 id = 2;
- // The name of the entity.
- // A name matching regex "__.*__" is reserved/read-only.
- // A name must not be more than 500 characters.
- // Cannot be "".
- optional string name = 3;
- }
-
- // The entity path.
- // An entity path consists of one or more elements composed of a kind and a
- // string or numerical identifier, which identify entities. The first
- // element identifies a root entity, the second element identifies
- // a child of the root entity, the third element a child of the
- // second entity, and so forth. The entities identified by all prefixes of
- // the path are called the element's ancestors.
- // An entity path is always fully complete: ALL of the entity's ancestors
- // are required to be in the path along with the entity identifier itself.
- // The only exception is that in some documented cases, the identifier in the
- // last path element (for the entity) itself may be omitted. A path can never
- // be empty.
- repeated PathElement path_element = 2;
-}
-
-// A message that can hold any of the supported value types and associated
-// metadata.
-//
-// At most one of the Value fields may be set.
-// If none are set the value is "null".
-//
-message Value {
- // A boolean value.
- optional bool boolean_value = 1;
- // An integer value.
- optional int64 integer_value = 2;
- // A double value.
- optional double double_value = 3;
- // A timestamp value.
- optional int64 timestamp_microseconds_value = 4;
- // A key value.
- optional Key key_value = 5;
- // A blob key value.
- optional string blob_key_value = 16;
- // A UTF-8 encoded string value.
- optional string string_value = 17;
- // A blob value.
- optional bytes blob_value = 18;
- // An entity value.
- // May have no key.
- // May have a key with an incomplete key path.
- // May have a reserved/read-only key.
- optional Entity entity_value = 6;
- // A list value.
- // Cannot contain another list value.
- // Cannot also have a meaning and indexing set.
- repeated Value list_value = 7;
-
- // The meaning field is reserved and should not be used.
- optional int32 meaning = 14;
-
- // If the value should be indexed.
- //
- // The indexed property may be set for a
- // null value.
- // When indexed is true, stringValue
- // is limited to 500 characters and the blob value is limited to 500 bytes.
- // Exception: If meaning is set to 2, string_value is limited to 2038
- // characters regardless of indexed.
- // When indexed is true, meaning 15 and 22 are not allowed, and meaning 16
- // will be ignored on input (and will never be set on output).
- // Input values by default have indexed set to
- // true; however, you can explicitly set indexed to
- // true if you want. (An output value never has
- // indexed explicitly set to true.) If a value is
- // itself an entity, it cannot have indexed set to
- // true.
- // Exception: An entity value with meaning 9, 20 or 21 may be indexed.
- optional bool indexed = 15 [default = true];
-}
-
-// An entity property.
-message Property {
- // The name of the property.
- // A property name matching regex "__.*__" is reserved.
- // A reserved property name is forbidden in certain documented contexts.
- // The name must not contain more than 500 characters.
- // Cannot be "".
- required string name = 1;
-
- // The value(s) of the property.
- // Each value can have only one value property populated. For example,
- // you cannot have a values list of { value: { integerValue: 22,
- // stringValue: "a" } }, but you can have { value: { listValue:
- // [ { integerValue: 22 }, { stringValue: "a" } ] }.
- required Value value = 4;
-}
-
-// An entity.
-//
-// An entity is limited to 1 megabyte when stored. That roughly
-// corresponds to a limit of 1 megabyte for the serialized form of this
-// message.
-message Entity {
- // The entity's key.
- //
- // An entity must have a key, unless otherwise documented (for example,
- // an entity in Value.entityValue may have no key).
- // An entity's kind is its key's path's last element's kind,
- // or null if it has no key.
- optional Key key = 1;
- // The entity's properties.
- // Each property's name must be unique for its entity.
- repeated Property property = 2;
-}
-
-// The result of fetching an entity from the datastore.
-message EntityResult {
- // Specifies what data the 'entity' field contains.
- // A ResultType is either implied (for example, in LookupResponse.found it
- // is always FULL) or specified by context (for example, in message
- // QueryResultBatch, field 'entity_result_type' specifies a ResultType
- // for all the values in field 'entity_result').
- enum ResultType {
- FULL = 1; // The entire entity.
- PROJECTION = 2; // A projected subset of properties.
- // The entity may have no key.
- // A property value may have meaning 18.
- KEY_ONLY = 3; // Only the key.
- }
-
- // The resulting entity.
- required Entity entity = 1;
-}
-
-// A query.
-message Query {
- // The projection to return. If not set the entire entity is returned.
- repeated PropertyExpression projection = 2;
-
- // The kinds to query (if empty, returns entities from all kinds).
- repeated KindExpression kind = 3;
-
- // The filter to apply (optional).
- optional Filter filter = 4;
-
- // The order to apply to the query results (if empty, order is unspecified).
- repeated PropertyOrder order = 5;
-
- // The properties to group by (if empty, no grouping is applied to the
- // result set).
- repeated PropertyReference group_by = 6;
-
- // A starting point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ start_cursor = 7;
-
- // An ending point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ end_cursor = 8;
-
- // The number of results to skip. Applies before limit, but after all other
- // constraints (optional, defaults to 0).
- optional int32 offset = 10 [default=0];
-
- // The maximum number of results to return. Applies after all other
- // constraints. Optional.
- optional int32 limit = 11;
-}
-
-// A representation of a kind.
-message KindExpression {
- // The name of the kind.
- required string name = 1;
-}
-
-// A reference to a property relative to the kind expressions.
-// exactly.
-message PropertyReference {
- // The name of the property.
- required string name = 2;
-}
-
-// A representation of a property in a projection.
-message PropertyExpression {
- enum AggregationFunction {
- FIRST = 1;
- }
- // The property to project.
- required PropertyReference property = 1;
- // The aggregation function to apply to the property. Optional.
- // Can only be used when grouping by at least one property. Must
- // then be set on all properties in the projection that are not
- // being grouped by.
- optional AggregationFunction aggregation_function = 2;
-}
-
-// The desired order for a specific property.
-message PropertyOrder {
- enum Direction {
- ASCENDING = 1;
- DESCENDING = 2;
- }
- // The property to order by.
- required PropertyReference property = 1;
- // The direction to order by.
- optional Direction direction = 2 [default=ASCENDING];
-}
-
-// A holder for any type of filter. Exactly one field should be specified.
-message Filter {
- // A composite filter.
- optional CompositeFilter composite_filter = 1;
- // A filter on a property.
- optional PropertyFilter property_filter = 2;
-}
-
-// A filter that merges the multiple other filters using the given operation.
-message CompositeFilter {
- enum Operator {
- AND = 1;
- }
-
- // The operator for combining multiple filters.
- required Operator operator = 1;
- // The list of filters to combine.
- // Must contain at least one filter.
- repeated Filter filter = 2;
-}
-
-// A filter on a specific property.
-message PropertyFilter {
- enum Operator {
- LESS_THAN = 1;
- LESS_THAN_OR_EQUAL = 2;
- GREATER_THAN = 3;
- GREATER_THAN_OR_EQUAL = 4;
- EQUAL = 5;
-
- HAS_ANCESTOR = 11;
- }
-
- // The property to filter by.
- required PropertyReference property = 1;
- // The operator to filter by.
- required Operator operator = 2;
- // The value to compare the property to.
- required Value value = 3;
-}
-
-// A GQL query.
-message GqlQuery {
- required string query_string = 1;
- // When false, the query string must not contain a literal.
- optional bool allow_literal = 2 [default = false];
- // A named argument must set field GqlQueryArg.name.
- // No two named arguments may have the same name.
- // For each non-reserved named binding site in the query string,
- // there must be a named argument with that name,
- // but not necessarily the inverse.
- repeated GqlQueryArg name_arg = 3;
- // Numbered binding site @1 references the first numbered argument,
- // effectively using 1-based indexing, rather than the usual 0.
- // A numbered argument must NOT set field GqlQueryArg.name.
- // For each binding site numbered i in query_string,
- // there must be an ith numbered argument.
- // The inverse must also be true.
- repeated GqlQueryArg number_arg = 4;
-}
-
-// A binding argument for a GQL query.
-// Exactly one of fields value and cursor must be set.
-message GqlQueryArg {
- // Must match regex "[A-Za-z_$][A-Za-z_$0-9]*".
- // Must not match regex "__.*__".
- // Must not be "".
- optional string name = 1;
- optional Value value = 2;
- optional bytes cursor = 3;
-}
-
-// A batch of results produced by a query.
-message QueryResultBatch {
- // The possible values for the 'more_results' field.
- enum MoreResultsType {
- NOT_FINISHED = 1; // There are additional batches to fetch from this query.
- MORE_RESULTS_AFTER_LIMIT = 2; // The query is finished, but there are more
- // results after the limit.
- NO_MORE_RESULTS = 3; // The query has been exhausted.
- }
-
- // The result type for every entity in entityResults.
- required EntityResult.ResultType entity_result_type = 1;
- // The results for this batch.
- repeated EntityResult entity_result = 2;
-
- // A cursor that points to the position after the last result in the batch.
- // May be absent.
- optional bytes /* serialized QueryCursor */ end_cursor = 4;
-
- // The state of the query after the current batch.
- required MoreResultsType more_results = 5;
-
- // The number of results skipped because of Query.offset.
- optional int32 skipped_results = 6;
-}
-
-// A set of changes to apply.
-//
-// No entity in this message may have a reserved property name,
-// not even a property in an entity in a value.
-// No value in this message may have meaning 18,
-// not even a value in an entity in another value.
-//
-// If entities with duplicate keys are present, an arbitrary choice will
-// be made as to which is written.
-message Mutation {
- // Entities to upsert.
- // Each upserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity upsert = 1;
- // Entities to update.
- // Each updated entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity update = 2;
- // Entities to insert.
- // Each inserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity insert = 3;
- // Insert entities with a newly allocated ID.
- // Each inserted entity's key must omit the final identifier in its path and
- // must not be reserved/read-only.
- repeated Entity insert_auto_id = 4;
- // Keys of entities to delete.
- // Each key must have a complete key path and must not be reserved/read-only.
- repeated Key delete = 5;
- // Ignore a user specified read-only period. Optional.
- optional bool force = 6;
-}
-
-// The result of applying a mutation.
-message MutationResult {
- // Number of index writes.
- required int32 index_updates = 1;
- // Keys for insertAutoId entities. One per entity from the
- // request, in the same order.
- repeated Key insert_auto_id_key = 2;
-}
-
-// Options shared by read requests.
-message ReadOptions {
- enum ReadConsistency {
- DEFAULT = 0;
- STRONG = 1;
- EVENTUAL = 2;
- }
-
- // The read consistency to use.
- // Cannot be set when transaction is set.
- // Lookup and ancestor queries default to STRONG, global queries default to
- // EVENTUAL and cannot be set to STRONG.
- optional ReadConsistency read_consistency = 1 [default=DEFAULT];
-
- // The transaction to use. Optional.
- optional bytes /* serialized Transaction */ transaction = 2;
-}
-
-// The request for Lookup.
-message LookupRequest {
-
- // Options for this lookup request. Optional.
- optional ReadOptions read_options = 1;
- // Keys of entities to look up from the datastore.
- repeated Key key = 3;
-}
-
-// The response for Lookup.
-message LookupResponse {
-
- // The order of results in these fields is undefined and has no relation to
- // the order of the keys in the input.
-
- // Entities found as ResultType.FULL entities.
- repeated EntityResult found = 1;
-
- // Entities not found as ResultType.KEY_ONLY entities.
- repeated EntityResult missing = 2;
-
- // A list of keys that were not looked up due to resource constraints.
- repeated Key deferred = 3;
-}
-
-
-// The request for RunQuery.
-message RunQueryRequest {
-
- // The options for this query.
- optional ReadOptions read_options = 1;
-
- // Entities are partitioned into subsets, identified by a dataset (usually
- // implicitly specified by the project) and namespace ID. Queries are scoped
- // to a single partition.
- // This partition ID is normalized with the standard default context
- // partition ID, but all other partition IDs in RunQueryRequest are
- // normalized with this partition ID as the context partition ID.
- optional PartitionId partition_id = 2;
-
- // The query to run.
- // Either this field or field gql_query must be set, but not both.
- optional Query query = 3;
- // The GQL query to run.
- // Either this field or field query must be set, but not both.
- optional GqlQuery gql_query = 7;
-}
-
-// The response for RunQuery.
-message RunQueryResponse {
-
- // A batch of query results (always present).
- optional QueryResultBatch batch = 1;
-
-}
-
-// The request for BeginTransaction.
-message BeginTransactionRequest {
-
- enum IsolationLevel {
- SNAPSHOT = 0; // Read from a consistent snapshot. Concurrent transactions
- // conflict if their mutations conflict. For example:
- // Read(A),Write(B) may not conflict with Read(B),Write(A),
- // but Read(B),Write(B) does conflict with Read(B),Write(B).
- SERIALIZABLE = 1; // Read from a consistent snapshot. Concurrent
- // transactions conflict if they cannot be serialized.
- // For example Read(A),Write(B) does conflict with
- // Read(B),Write(A) but Read(A) may not conflict with
- // Write(A).
- }
-
- // The transaction isolation level.
- optional IsolationLevel isolation_level = 1 [default=SNAPSHOT];
-}
-
-// The response for BeginTransaction.
-message BeginTransactionResponse {
-
- // The transaction identifier (always present).
- optional bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The request for Rollback.
-message RollbackRequest {
-
- // The transaction identifier, returned by a call to
- // beginTransaction.
- required bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The response for Rollback.
-message RollbackResponse {
-// Empty
-}
-
-// The request for Commit.
-message CommitRequest {
-
- enum Mode {
- TRANSACTIONAL = 1;
- NON_TRANSACTIONAL = 2;
- }
-
- // The transaction identifier, returned by a call to
- // beginTransaction. Must be set when mode is TRANSACTIONAL.
- optional bytes /* serialized Transaction */ transaction = 1;
- // The mutation to perform. Optional.
- optional Mutation mutation = 2;
- // The type of commit to perform. Either TRANSACTIONAL or NON_TRANSACTIONAL.
- optional Mode mode = 5 [default=TRANSACTIONAL];
-}
-
-// The response for Commit.
-message CommitResponse {
-
- // The result of performing the mutation (if any).
- optional MutationResult mutation_result = 1;
-}
-
-// The request for AllocateIds.
-message AllocateIdsRequest {
-
- // A list of keys with incomplete key paths to allocate IDs for.
- // No key may be reserved/read-only.
- repeated Key key = 1;
-}
-
-// The response for AllocateIds.
-message AllocateIdsResponse {
-
- // The keys specified in the request (in the same order), each with
- // its key path completed with a newly allocated ID.
- repeated Key key = 1;
-}
-
-// Each rpc normalizes the partition IDs of the keys in its input entities,
-// and always returns entities with keys with normalized partition IDs.
-// (Note that applies to all entities, including entities in values.)
-service DatastoreService {
- // Look up some entities by key.
- rpc Lookup(LookupRequest) returns (LookupResponse) {
- };
- // Query for entities.
- rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
- };
- // Begin a new transaction.
- rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
- };
- // Commit a transaction, optionally creating, deleting or modifying some
- // entities.
- rpc Commit(CommitRequest) returns (CommitResponse) {
- };
- // Roll back a transaction.
- rpc Rollback(RollbackRequest) returns (RollbackResponse) {
- };
- // Allocate IDs for incomplete keys (useful for referencing an entity before
- // it is inserted).
- rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
- };
-}
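For context on what is going away: the messages defined in the proto above were exposed to Python through the vendored ``_datastore_v1_pb2`` module deleted in the next file. A rough illustration of that old v1 surface, using only field names that appear in the proto text above (not code that remains in the library):

    # Illustration only: _datastore_v1_pb2 is the generated module removed
    # in the next file of this diff.
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    key = datastore_pb.Key()
    key.partition_id.dataset_id = 'my-dataset-id'
    path_element = key.path_element.add()
    path_element.kind = 'Person'
    path_element.name = 'alice'

    entity = datastore_pb.Entity()
    entity.key.CopyFrom(key)
    prop = entity.property.add()
    prop.name = 'full_name'
    prop.value.string_value = 'Alice Example'

    print(entity)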
diff --git a/gcloud/datastore/_datastore_v1_pb2.py b/gcloud/datastore/_datastore_v1_pb2.py
deleted file mode 100644
index e31b4baec791..000000000000
--- a/gcloud/datastore/_datastore_v1_pb2.py
+++ /dev/null
@@ -1,1953 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: _datastore_v1.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name='_datastore_v1.proto',
- package='api.services.datastore',
- serialized_pb=_b('\n\x13_datastore_v1.proto\x12\x16\x61pi.services.datastore\"4\n\x0bPartitionId\x12\x12\n\ndataset_id\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\"\xb6\x01\n\x03Key\x12\x39\n\x0cpartition_id\x18\x01 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12=\n\x0cpath_element\x18\x02 \x03(\x0b\x32\'.api.services.datastore.Key.PathElement\x1a\x35\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xf4\x02\n\x05Value\x12\x15\n\rboolean_value\x18\x01 \x01(\x08\x12\x15\n\rinteger_value\x18\x02 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x03 \x01(\x01\x12$\n\x1ctimestamp_microseconds_value\x18\x04 \x01(\x03\x12.\n\tkey_value\x18\x05 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x16\n\x0e\x62lob_key_value\x18\x10 \x01(\t\x12\x14\n\x0cstring_value\x18\x11 \x01(\t\x12\x12\n\nblob_value\x18\x12 \x01(\x0c\x12\x34\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1e.api.services.datastore.Entity\x12\x31\n\nlist_value\x18\x07 \x03(\x0b\x32\x1d.api.services.datastore.Value\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x15\n\x07indexed\x18\x0f \x01(\x08:\x04true\"F\n\x08Property\x12\x0c\n\x04name\x18\x01 \x02(\t\x12,\n\x05value\x18\x04 \x02(\x0b\x32\x1d.api.services.datastore.Value\"f\n\x06\x45ntity\x12(\n\x03key\x18\x01 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x32\n\x08property\x18\x02 \x03(\x0b\x32 .api.services.datastore.Property\"t\n\x0c\x45ntityResult\x12.\n\x06\x65ntity\x18\x01 \x02(\x0b\x32\x1e.api.services.datastore.Entity\"4\n\nResultType\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xec\x02\n\x05Query\x12>\n\nprojection\x18\x02 \x03(\x0b\x32*.api.services.datastore.PropertyExpression\x12\x34\n\x04kind\x18\x03 \x03(\x0b\x32&.api.services.datastore.KindExpression\x12.\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1e.api.services.datastore.Filter\x12\x34\n\x05order\x18\x05 \x03(\x0b\x32%.api.services.datastore.PropertyOrder\x12;\n\x08group_by\x18\x06 \x03(\x0b\x32).api.services.datastore.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x11\n\x06offset\x18\n \x01(\x05:\x01\x30\x12\r\n\x05limit\x18\x0b \x01(\x05\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x02(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x02(\t\"\xd1\x01\n\x12PropertyExpression\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\\\n\x14\x61ggregation_function\x18\x02 \x01(\x0e\x32>.api.services.datastore.PropertyExpression.AggregationFunction\" \n\x13\x41ggregationFunction\x12\t\n\x05\x46IRST\x10\x01\"\xc7\x01\n\rPropertyOrder\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12M\n\tdirection\x18\x02 \x01(\x0e\x32/.api.services.datastore.PropertyOrder.Direction:\tASCENDING\"*\n\tDirection\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x8c\x01\n\x06\x46ilter\x12\x41\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\'.api.services.datastore.CompositeFilter\x12?\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32&.api.services.datastore.PropertyFilter\"\x9a\x01\n\x0f\x43ompositeFilter\x12\x42\n\x08operator\x18\x01 \x02(\x0e\x32\x30.api.services.datastore.CompositeFilter.Operator\x12.\n\x06\x66ilter\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Filter\"\x13\n\x08Operator\x12\x07\n\x03\x41ND\x10\x01\"\xbb\x02\n\x0ePropertyFilter\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\x41\n\x08operator\x18\x02 
\x02(\x0e\x32/.api.services.datastore.PropertyFilter.Operator\x12,\n\x05value\x18\x03 \x02(\x0b\x32\x1d.api.services.datastore.Value\"{\n\x08Operator\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xae\x01\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x02(\t\x12\x1c\n\rallow_literal\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x35\n\x08name_arg\x18\x03 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\x12\x37\n\nnumber_arg\x18\x04 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\"Y\n\x0bGqlQueryArg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.api.services.datastore.Value\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"\xf1\x02\n\x10QueryResultBatch\x12K\n\x12\x65ntity_result_type\x18\x01 \x02(\x0e\x32/.api.services.datastore.EntityResult.ResultType\x12;\n\rentity_result\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12N\n\x0cmore_results\x18\x05 \x02(\x0e\x32\x38.api.services.datastore.QueryResultBatch.MoreResultsType\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\"V\n\x0fMoreResultsType\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\"\x8e\x02\n\x08Mutation\x12.\n\x06upsert\x18\x01 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06update\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06insert\x18\x03 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12\x36\n\x0einsert_auto_id\x18\x04 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12+\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\x1b.api.services.datastore.Key\x12\r\n\x05\x66orce\x18\x06 \x01(\x08\"`\n\x0eMutationResult\x12\x15\n\rindex_updates\x18\x01 \x02(\x05\x12\x37\n\x12insert_auto_id_key\x18\x02 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xb4\x01\n\x0bReadOptions\x12V\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x33.api.services.datastore.ReadOptions.ReadConsistency:\x07\x44\x45\x46\x41ULT\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"8\n\x0fReadConsistency\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\"t\n\rLookupRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12(\n\x03key\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xab\x01\n\x0eLookupResponse\x12\x33\n\x05\x66ound\x18\x01 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x35\n\x07missing\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12-\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xea\x01\n\x0fRunQueryRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12\x39\n\x0cpartition_id\x18\x02 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12,\n\x05query\x18\x03 \x01(\x0b\x32\x1d.api.services.datastore.Query\x12\x33\n\tgql_query\x18\x07 \x01(\x0b\x32 .api.services.datastore.GqlQuery\"K\n\x10RunQueryResponse\x12\x37\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32(.api.services.datastore.QueryResultBatch\"\xae\x01\n\x17\x42\x65ginTransactionRequest\x12\x61\n\x0fisolation_level\x18\x01 \x01(\x0e\x32>.api.services.datastore.BeginTransactionRequest.IsolationLevel:\x08SNAPSHOT\"0\n\x0eIsolationLevel\x12\x0c\n\x08SNAPSHOT\x10\x00\x12\x10\n\x0cSERIALIZABLE\x10\x01\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 
\x01(\x0c\"&\n\x0fRollbackRequest\x12\x13\n\x0btransaction\x18\x01 \x02(\x0c\"\x12\n\x10RollbackResponse\"\xd3\x01\n\rCommitRequest\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\x12\x32\n\x08mutation\x18\x02 \x01(\x0b\x32 .api.services.datastore.Mutation\x12G\n\x04mode\x18\x05 \x01(\x0e\x32*.api.services.datastore.CommitRequest.Mode:\rTRANSACTIONAL\"0\n\x04Mode\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\"Q\n\x0e\x43ommitResponse\x12?\n\x0fmutation_result\x18\x01 \x01(\x0b\x32&.api.services.datastore.MutationResult\">\n\x12\x41llocateIdsRequest\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key\"?\n\x13\x41llocateIdsResponse\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key2\xed\x04\n\x10\x44\x61tastoreService\x12Y\n\x06Lookup\x12%.api.services.datastore.LookupRequest\x1a&.api.services.datastore.LookupResponse\"\x00\x12_\n\x08RunQuery\x12\'.api.services.datastore.RunQueryRequest\x1a(.api.services.datastore.RunQueryResponse\"\x00\x12w\n\x10\x42\x65ginTransaction\x12/.api.services.datastore.BeginTransactionRequest\x1a\x30.api.services.datastore.BeginTransactionResponse\"\x00\x12Y\n\x06\x43ommit\x12%.api.services.datastore.CommitRequest\x1a&.api.services.datastore.CommitResponse\"\x00\x12_\n\x08Rollback\x12\'.api.services.datastore.RollbackRequest\x1a(.api.services.datastore.RollbackResponse\"\x00\x12h\n\x0b\x41llocateIds\x12*.api.services.datastore.AllocateIdsRequest\x1a+.api.services.datastore.AllocateIdsResponse\"\x00\x42#\n!com.google.api.services.datastore')
-)
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-
-
-_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor(
- name='ResultType',
- full_name='api.services.datastore.EntityResult.ResultType',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='FULL', index=0, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='PROJECTION', index=1, number=2,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='KEY_ONLY', index=2, number=3,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=901,
- serialized_end=953,
-)
-_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE)
-
-_PROPERTYEXPRESSION_AGGREGATIONFUNCTION = _descriptor.EnumDescriptor(
- name='AggregationFunction',
- full_name='api.services.datastore.PropertyExpression.AggregationFunction',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='FIRST', index=0, number=1,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=1567,
- serialized_end=1599,
-)
-_sym_db.RegisterEnumDescriptor(_PROPERTYEXPRESSION_AGGREGATIONFUNCTION)
-
-_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor(
- name='Direction',
- full_name='api.services.datastore.PropertyOrder.Direction',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='ASCENDING', index=0, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='DESCENDING', index=1, number=2,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=1759,
- serialized_end=1801,
-)
-_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION)
-
-_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
- name='Operator',
- full_name='api.services.datastore.CompositeFilter.Operator',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='AND', index=0, number=1,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=2082,
- serialized_end=2101,
-)
-_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR)
-
-_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor(
- name='Operator',
- full_name='api.services.datastore.PropertyFilter.Operator',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='LESS_THAN', index=0, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='LESS_THAN_OR_EQUAL', index=1, number=2,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='GREATER_THAN', index=2, number=3,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='GREATER_THAN_OR_EQUAL', index=3, number=4,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='EQUAL', index=4, number=5,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='HAS_ANCESTOR', index=5, number=11,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=2296,
- serialized_end=2419,
-)
-_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR)
-
-_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor(
- name='MoreResultsType',
- full_name='api.services.datastore.QueryResultBatch.MoreResultsType',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='NOT_FINISHED', index=0, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='MORE_RESULTS_AFTER_LIMIT', index=1, number=2,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='NO_MORE_RESULTS', index=2, number=3,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=2973,
- serialized_end=3059,
-)
-_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE)
-
-_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor(
- name='ReadConsistency',
- full_name='api.services.datastore.ReadOptions.ReadConsistency',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='DEFAULT', index=0, number=0,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='STRONG', index=1, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='EVENTUAL', index=2, number=2,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=3557,
- serialized_end=3613,
-)
-_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY)
-
-_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL = _descriptor.EnumDescriptor(
- name='IsolationLevel',
- full_name='api.services.datastore.BeginTransactionRequest.IsolationLevel',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='SNAPSHOT', index=0, number=0,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='SERIALIZABLE', index=1, number=1,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=4348,
- serialized_end=4396,
-)
-_sym_db.RegisterEnumDescriptor(_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL)
-
-_COMMITREQUEST_MODE = _descriptor.EnumDescriptor(
- name='Mode',
- full_name='api.services.datastore.CommitRequest.Mode',
- filename=None,
- file=DESCRIPTOR,
- values=[
- _descriptor.EnumValueDescriptor(
- name='TRANSACTIONAL', index=0, number=1,
- options=None,
- type=None),
- _descriptor.EnumValueDescriptor(
- name='NON_TRANSACTIONAL', index=1, number=2,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=4671,
- serialized_end=4719,
-)
-_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE)
-
-
-_PARTITIONID = _descriptor.Descriptor(
- name='PartitionId',
- full_name='api.services.datastore.PartitionId',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='dataset_id', full_name='api.services.datastore.PartitionId.dataset_id', index=0,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='namespace', full_name='api.services.datastore.PartitionId.namespace', index=1,
- number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=47,
- serialized_end=99,
-)
-
-
-_KEY_PATHELEMENT = _descriptor.Descriptor(
- name='PathElement',
- full_name='api.services.datastore.Key.PathElement',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='kind', full_name='api.services.datastore.Key.PathElement.kind', index=0,
- number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='id', full_name='api.services.datastore.Key.PathElement.id', index=1,
- number=2, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='name', full_name='api.services.datastore.Key.PathElement.name', index=2,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=231,
- serialized_end=284,
-)
-
-_KEY = _descriptor.Descriptor(
- name='Key',
- full_name='api.services.datastore.Key',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='partition_id', full_name='api.services.datastore.Key.partition_id', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='path_element', full_name='api.services.datastore.Key.path_element', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[_KEY_PATHELEMENT, ],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=102,
- serialized_end=284,
-)
-
-
-_VALUE = _descriptor.Descriptor(
- name='Value',
- full_name='api.services.datastore.Value',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='boolean_value', full_name='api.services.datastore.Value.boolean_value', index=0,
- number=1, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='integer_value', full_name='api.services.datastore.Value.integer_value', index=1,
- number=2, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='double_value', full_name='api.services.datastore.Value.double_value', index=2,
- number=3, type=1, cpp_type=5, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='timestamp_microseconds_value', full_name='api.services.datastore.Value.timestamp_microseconds_value', index=3,
- number=4, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='key_value', full_name='api.services.datastore.Value.key_value', index=4,
- number=5, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='blob_key_value', full_name='api.services.datastore.Value.blob_key_value', index=5,
- number=16, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='string_value', full_name='api.services.datastore.Value.string_value', index=6,
- number=17, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='blob_value', full_name='api.services.datastore.Value.blob_value', index=7,
- number=18, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='entity_value', full_name='api.services.datastore.Value.entity_value', index=8,
- number=6, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='list_value', full_name='api.services.datastore.Value.list_value', index=9,
- number=7, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='meaning', full_name='api.services.datastore.Value.meaning', index=10,
- number=14, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='indexed', full_name='api.services.datastore.Value.indexed', index=11,
- number=15, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=True,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=287,
- serialized_end=659,
-)
-
-
-_PROPERTY = _descriptor.Descriptor(
- name='Property',
- full_name='api.services.datastore.Property',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='api.services.datastore.Property.name', index=0,
- number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='value', full_name='api.services.datastore.Property.value', index=1,
- number=4, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=661,
- serialized_end=731,
-)
-
-
-_ENTITY = _descriptor.Descriptor(
- name='Entity',
- full_name='api.services.datastore.Entity',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='key', full_name='api.services.datastore.Entity.key', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='property', full_name='api.services.datastore.Entity.property', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=733,
- serialized_end=835,
-)
-
-
-_ENTITYRESULT = _descriptor.Descriptor(
- name='EntityResult',
- full_name='api.services.datastore.EntityResult',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='entity', full_name='api.services.datastore.EntityResult.entity', index=0,
- number=1, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _ENTITYRESULT_RESULTTYPE,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=837,
- serialized_end=953,
-)
-
-
-_QUERY = _descriptor.Descriptor(
- name='Query',
- full_name='api.services.datastore.Query',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='projection', full_name='api.services.datastore.Query.projection', index=0,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='kind', full_name='api.services.datastore.Query.kind', index=1,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='filter', full_name='api.services.datastore.Query.filter', index=2,
- number=4, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='order', full_name='api.services.datastore.Query.order', index=3,
- number=5, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='group_by', full_name='api.services.datastore.Query.group_by', index=4,
- number=6, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='start_cursor', full_name='api.services.datastore.Query.start_cursor', index=5,
- number=7, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='end_cursor', full_name='api.services.datastore.Query.end_cursor', index=6,
- number=8, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='offset', full_name='api.services.datastore.Query.offset', index=7,
- number=10, type=5, cpp_type=1, label=1,
- has_default_value=True, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='limit', full_name='api.services.datastore.Query.limit', index=8,
- number=11, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=956,
- serialized_end=1320,
-)
-
-
-_KINDEXPRESSION = _descriptor.Descriptor(
- name='KindExpression',
- full_name='api.services.datastore.KindExpression',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='api.services.datastore.KindExpression.name', index=0,
- number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1322,
- serialized_end=1352,
-)
-
-
-_PROPERTYREFERENCE = _descriptor.Descriptor(
- name='PropertyReference',
- full_name='api.services.datastore.PropertyReference',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='api.services.datastore.PropertyReference.name', index=0,
- number=2, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1354,
- serialized_end=1387,
-)
-
-
-_PROPERTYEXPRESSION = _descriptor.Descriptor(
- name='PropertyExpression',
- full_name='api.services.datastore.PropertyExpression',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='property', full_name='api.services.datastore.PropertyExpression.property', index=0,
- number=1, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='aggregation_function', full_name='api.services.datastore.PropertyExpression.aggregation_function', index=1,
- number=2, type=14, cpp_type=8, label=1,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _PROPERTYEXPRESSION_AGGREGATIONFUNCTION,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1390,
- serialized_end=1599,
-)
-
-
-_PROPERTYORDER = _descriptor.Descriptor(
- name='PropertyOrder',
- full_name='api.services.datastore.PropertyOrder',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='property', full_name='api.services.datastore.PropertyOrder.property', index=0,
- number=1, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='direction', full_name='api.services.datastore.PropertyOrder.direction', index=1,
- number=2, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _PROPERTYORDER_DIRECTION,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1602,
- serialized_end=1801,
-)
-
-
-_FILTER = _descriptor.Descriptor(
- name='Filter',
- full_name='api.services.datastore.Filter',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='composite_filter', full_name='api.services.datastore.Filter.composite_filter', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='property_filter', full_name='api.services.datastore.Filter.property_filter', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1804,
- serialized_end=1944,
-)
-
-
-_COMPOSITEFILTER = _descriptor.Descriptor(
- name='CompositeFilter',
- full_name='api.services.datastore.CompositeFilter',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='operator', full_name='api.services.datastore.CompositeFilter.operator', index=0,
- number=1, type=14, cpp_type=8, label=2,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='filter', full_name='api.services.datastore.CompositeFilter.filter', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _COMPOSITEFILTER_OPERATOR,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=1947,
- serialized_end=2101,
-)
-
-
-_PROPERTYFILTER = _descriptor.Descriptor(
- name='PropertyFilter',
- full_name='api.services.datastore.PropertyFilter',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='property', full_name='api.services.datastore.PropertyFilter.property', index=0,
- number=1, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='operator', full_name='api.services.datastore.PropertyFilter.operator', index=1,
- number=2, type=14, cpp_type=8, label=2,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='value', full_name='api.services.datastore.PropertyFilter.value', index=2,
- number=3, type=11, cpp_type=10, label=2,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _PROPERTYFILTER_OPERATOR,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=2104,
- serialized_end=2419,
-)
-
-
-_GQLQUERY = _descriptor.Descriptor(
- name='GqlQuery',
- full_name='api.services.datastore.GqlQuery',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='query_string', full_name='api.services.datastore.GqlQuery.query_string', index=0,
- number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='allow_literal', full_name='api.services.datastore.GqlQuery.allow_literal', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='name_arg', full_name='api.services.datastore.GqlQuery.name_arg', index=2,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='number_arg', full_name='api.services.datastore.GqlQuery.number_arg', index=3,
- number=4, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=2422,
- serialized_end=2596,
-)
-
-
-_GQLQUERYARG = _descriptor.Descriptor(
- name='GqlQueryArg',
- full_name='api.services.datastore.GqlQueryArg',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='name', full_name='api.services.datastore.GqlQueryArg.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=_b("").decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='value', full_name='api.services.datastore.GqlQueryArg.value', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='cursor', full_name='api.services.datastore.GqlQueryArg.cursor', index=2,
- number=3, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=2598,
- serialized_end=2687,
-)
-
-
-_QUERYRESULTBATCH = _descriptor.Descriptor(
- name='QueryResultBatch',
- full_name='api.services.datastore.QueryResultBatch',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='entity_result_type', full_name='api.services.datastore.QueryResultBatch.entity_result_type', index=0,
- number=1, type=14, cpp_type=8, label=2,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='entity_result', full_name='api.services.datastore.QueryResultBatch.entity_result', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='end_cursor', full_name='api.services.datastore.QueryResultBatch.end_cursor', index=2,
- number=4, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='more_results', full_name='api.services.datastore.QueryResultBatch.more_results', index=3,
- number=5, type=14, cpp_type=8, label=2,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='skipped_results', full_name='api.services.datastore.QueryResultBatch.skipped_results', index=4,
- number=6, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _QUERYRESULTBATCH_MORERESULTSTYPE,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=2690,
- serialized_end=3059,
-)
-
-
-_MUTATION = _descriptor.Descriptor(
- name='Mutation',
- full_name='api.services.datastore.Mutation',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='upsert', full_name='api.services.datastore.Mutation.upsert', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='update', full_name='api.services.datastore.Mutation.update', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='insert', full_name='api.services.datastore.Mutation.insert', index=2,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='insert_auto_id', full_name='api.services.datastore.Mutation.insert_auto_id', index=3,
- number=4, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='delete', full_name='api.services.datastore.Mutation.delete', index=4,
- number=5, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='force', full_name='api.services.datastore.Mutation.force', index=5,
- number=6, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3062,
- serialized_end=3332,
-)
-
-
-_MUTATIONRESULT = _descriptor.Descriptor(
- name='MutationResult',
- full_name='api.services.datastore.MutationResult',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='index_updates', full_name='api.services.datastore.MutationResult.index_updates', index=0,
- number=1, type=5, cpp_type=1, label=2,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='insert_auto_id_key', full_name='api.services.datastore.MutationResult.insert_auto_id_key', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3334,
- serialized_end=3430,
-)
-
-
-_READOPTIONS = _descriptor.Descriptor(
- name='ReadOptions',
- full_name='api.services.datastore.ReadOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='read_consistency', full_name='api.services.datastore.ReadOptions.read_consistency', index=0,
- number=1, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='transaction', full_name='api.services.datastore.ReadOptions.transaction', index=1,
- number=2, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _READOPTIONS_READCONSISTENCY,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3433,
- serialized_end=3613,
-)
-
-
-_LOOKUPREQUEST = _descriptor.Descriptor(
- name='LookupRequest',
- full_name='api.services.datastore.LookupRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='read_options', full_name='api.services.datastore.LookupRequest.read_options', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='key', full_name='api.services.datastore.LookupRequest.key', index=1,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3615,
- serialized_end=3731,
-)
-
-
-_LOOKUPRESPONSE = _descriptor.Descriptor(
- name='LookupResponse',
- full_name='api.services.datastore.LookupResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='found', full_name='api.services.datastore.LookupResponse.found', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='missing', full_name='api.services.datastore.LookupResponse.missing', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='deferred', full_name='api.services.datastore.LookupResponse.deferred', index=2,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3734,
- serialized_end=3905,
-)
-
-
-_RUNQUERYREQUEST = _descriptor.Descriptor(
- name='RunQueryRequest',
- full_name='api.services.datastore.RunQueryRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='read_options', full_name='api.services.datastore.RunQueryRequest.read_options', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='partition_id', full_name='api.services.datastore.RunQueryRequest.partition_id', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='query', full_name='api.services.datastore.RunQueryRequest.query', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='gql_query', full_name='api.services.datastore.RunQueryRequest.gql_query', index=3,
- number=7, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=3908,
- serialized_end=4142,
-)
-
-
-_RUNQUERYRESPONSE = _descriptor.Descriptor(
- name='RunQueryResponse',
- full_name='api.services.datastore.RunQueryResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='batch', full_name='api.services.datastore.RunQueryResponse.batch', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4144,
- serialized_end=4219,
-)
-
-
-_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor(
- name='BeginTransactionRequest',
- full_name='api.services.datastore.BeginTransactionRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='isolation_level', full_name='api.services.datastore.BeginTransactionRequest.isolation_level', index=0,
- number=1, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4222,
- serialized_end=4396,
-)
-
-
-_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor(
- name='BeginTransactionResponse',
- full_name='api.services.datastore.BeginTransactionResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='transaction', full_name='api.services.datastore.BeginTransactionResponse.transaction', index=0,
- number=1, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4398,
- serialized_end=4445,
-)
-
-
-_ROLLBACKREQUEST = _descriptor.Descriptor(
- name='RollbackRequest',
- full_name='api.services.datastore.RollbackRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='transaction', full_name='api.services.datastore.RollbackRequest.transaction', index=0,
- number=1, type=12, cpp_type=9, label=2,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4447,
- serialized_end=4485,
-)
-
-
-_ROLLBACKRESPONSE = _descriptor.Descriptor(
- name='RollbackResponse',
- full_name='api.services.datastore.RollbackResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4487,
- serialized_end=4505,
-)
-
-
-_COMMITREQUEST = _descriptor.Descriptor(
- name='CommitRequest',
- full_name='api.services.datastore.CommitRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='transaction', full_name='api.services.datastore.CommitRequest.transaction', index=0,
- number=1, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=_b(""),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='mutation', full_name='api.services.datastore.CommitRequest.mutation', index=1,
- number=2, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- _descriptor.FieldDescriptor(
- name='mode', full_name='api.services.datastore.CommitRequest.mode', index=2,
- number=5, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _COMMITREQUEST_MODE,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4508,
- serialized_end=4719,
-)
-
-
-_COMMITRESPONSE = _descriptor.Descriptor(
- name='CommitResponse',
- full_name='api.services.datastore.CommitResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='mutation_result', full_name='api.services.datastore.CommitResponse.mutation_result', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4721,
- serialized_end=4802,
-)
-
-
-_ALLOCATEIDSREQUEST = _descriptor.Descriptor(
- name='AllocateIdsRequest',
- full_name='api.services.datastore.AllocateIdsRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='key', full_name='api.services.datastore.AllocateIdsRequest.key', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4804,
- serialized_end=4866,
-)
-
-
-_ALLOCATEIDSRESPONSE = _descriptor.Descriptor(
- name='AllocateIdsResponse',
- full_name='api.services.datastore.AllocateIdsResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='key', full_name='api.services.datastore.AllocateIdsResponse.key', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=4868,
- serialized_end=4931,
-)
-
-_KEY_PATHELEMENT.containing_type = _KEY
-_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID
-_KEY.fields_by_name['path_element'].message_type = _KEY_PATHELEMENT
-_VALUE.fields_by_name['key_value'].message_type = _KEY
-_VALUE.fields_by_name['entity_value'].message_type = _ENTITY
-_VALUE.fields_by_name['list_value'].message_type = _VALUE
-_PROPERTY.fields_by_name['value'].message_type = _VALUE
-_ENTITY.fields_by_name['key'].message_type = _KEY
-_ENTITY.fields_by_name['property'].message_type = _PROPERTY
-_ENTITYRESULT.fields_by_name['entity'].message_type = _ENTITY
-_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT
-_QUERY.fields_by_name['projection'].message_type = _PROPERTYEXPRESSION
-_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION
-_QUERY.fields_by_name['filter'].message_type = _FILTER
-_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER
-_QUERY.fields_by_name['group_by'].message_type = _PROPERTYREFERENCE
-_PROPERTYEXPRESSION.fields_by_name['property'].message_type = _PROPERTYREFERENCE
-_PROPERTYEXPRESSION.fields_by_name['aggregation_function'].enum_type = _PROPERTYEXPRESSION_AGGREGATIONFUNCTION
-_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION
-_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
-_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION
-_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER
-_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER
-_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER
-_COMPOSITEFILTER.fields_by_name['operator'].enum_type = _COMPOSITEFILTER_OPERATOR
-_COMPOSITEFILTER.fields_by_name['filter'].message_type = _FILTER
-_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER
-_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
-_PROPERTYFILTER.fields_by_name['operator'].enum_type = _PROPERTYFILTER_OPERATOR
-_PROPERTYFILTER.fields_by_name['value'].message_type = _VALUE
-_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER
-_GQLQUERY.fields_by_name['name_arg'].message_type = _GQLQUERYARG
-_GQLQUERY.fields_by_name['number_arg'].message_type = _GQLQUERYARG
-_GQLQUERYARG.fields_by_name['value'].message_type = _VALUE
-_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE
-_QUERYRESULTBATCH.fields_by_name['entity_result'].message_type = _ENTITYRESULT
-_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE
-_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH
-_MUTATION.fields_by_name['upsert'].message_type = _ENTITY
-_MUTATION.fields_by_name['update'].message_type = _ENTITY
-_MUTATION.fields_by_name['insert'].message_type = _ENTITY
-_MUTATION.fields_by_name['insert_auto_id'].message_type = _ENTITY
-_MUTATION.fields_by_name['delete'].message_type = _KEY
-_MUTATIONRESULT.fields_by_name['insert_auto_id_key'].message_type = _KEY
-_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY
-_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS
-_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
-_LOOKUPREQUEST.fields_by_name['key'].message_type = _KEY
-_LOOKUPRESPONSE.fields_by_name['found'].message_type = _ENTITYRESULT
-_LOOKUPRESPONSE.fields_by_name['missing'].message_type = _ENTITYRESULT
-_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = _KEY
-_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
-_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = _PARTITIONID
-_RUNQUERYREQUEST.fields_by_name['query'].message_type = _QUERY
-_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = _GQLQUERY
-_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = _QUERYRESULTBATCH
-_BEGINTRANSACTIONREQUEST.fields_by_name['isolation_level'].enum_type = _BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL
-_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST
-_COMMITREQUEST.fields_by_name['mutation'].message_type = _MUTATION
-_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE
-_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST
-_COMMITRESPONSE.fields_by_name['mutation_result'].message_type = _MUTATIONRESULT
-_ALLOCATEIDSREQUEST.fields_by_name['key'].message_type = _KEY
-_ALLOCATEIDSRESPONSE.fields_by_name['key'].message_type = _KEY
-DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID
-DESCRIPTOR.message_types_by_name['Key'] = _KEY
-DESCRIPTOR.message_types_by_name['Value'] = _VALUE
-DESCRIPTOR.message_types_by_name['Property'] = _PROPERTY
-DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY
-DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT
-DESCRIPTOR.message_types_by_name['Query'] = _QUERY
-DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION
-DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE
-DESCRIPTOR.message_types_by_name['PropertyExpression'] = _PROPERTYEXPRESSION
-DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER
-DESCRIPTOR.message_types_by_name['Filter'] = _FILTER
-DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER
-DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER
-DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY
-DESCRIPTOR.message_types_by_name['GqlQueryArg'] = _GQLQUERYARG
-DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH
-DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION
-DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT
-DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS
-DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST
-DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE
-DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST
-DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE
-DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST
-DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE
-DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST
-DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE
-DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST
-DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE
-DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST
-DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE
-
-PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict(
- DESCRIPTOR = _PARTITIONID,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId)
- ))
-_sym_db.RegisterMessage(PartitionId)
-
-Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict(
-
- PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict(
- DESCRIPTOR = _KEY_PATHELEMENT,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement)
- ))
- ,
- DESCRIPTOR = _KEY,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Key)
- ))
-_sym_db.RegisterMessage(Key)
-_sym_db.RegisterMessage(Key.PathElement)
-
-Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
- DESCRIPTOR = _VALUE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Value)
- ))
-_sym_db.RegisterMessage(Value)
-
-Property = _reflection.GeneratedProtocolMessageType('Property', (_message.Message,), dict(
- DESCRIPTOR = _PROPERTY,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Property)
- ))
-_sym_db.RegisterMessage(Property)
-
-Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
- DESCRIPTOR = _ENTITY,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Entity)
- ))
-_sym_db.RegisterMessage(Entity)
-
-EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict(
- DESCRIPTOR = _ENTITYRESULT,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult)
- ))
-_sym_db.RegisterMessage(EntityResult)
-
-Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict(
- DESCRIPTOR = _QUERY,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Query)
- ))
-_sym_db.RegisterMessage(Query)
-
-KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict(
- DESCRIPTOR = _KINDEXPRESSION,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression)
- ))
-_sym_db.RegisterMessage(KindExpression)
-
-PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict(
- DESCRIPTOR = _PROPERTYREFERENCE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference)
- ))
-_sym_db.RegisterMessage(PropertyReference)
-
-PropertyExpression = _reflection.GeneratedProtocolMessageType('PropertyExpression', (_message.Message,), dict(
- DESCRIPTOR = _PROPERTYEXPRESSION,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression)
- ))
-_sym_db.RegisterMessage(PropertyExpression)
-
-PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict(
- DESCRIPTOR = _PROPERTYORDER,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder)
- ))
-_sym_db.RegisterMessage(PropertyOrder)
-
-Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict(
- DESCRIPTOR = _FILTER,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Filter)
- ))
-_sym_db.RegisterMessage(Filter)
-
-CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict(
- DESCRIPTOR = _COMPOSITEFILTER,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter)
- ))
-_sym_db.RegisterMessage(CompositeFilter)
-
-PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict(
- DESCRIPTOR = _PROPERTYFILTER,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter)
- ))
-_sym_db.RegisterMessage(PropertyFilter)
-
-GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict(
- DESCRIPTOR = _GQLQUERY,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery)
- ))
-_sym_db.RegisterMessage(GqlQuery)
-
-GqlQueryArg = _reflection.GeneratedProtocolMessageType('GqlQueryArg', (_message.Message,), dict(
- DESCRIPTOR = _GQLQUERYARG,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg)
- ))
-_sym_db.RegisterMessage(GqlQueryArg)
-
-QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict(
- DESCRIPTOR = _QUERYRESULTBATCH,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch)
- ))
-_sym_db.RegisterMessage(QueryResultBatch)
-
-Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict(
- DESCRIPTOR = _MUTATION,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.Mutation)
- ))
-_sym_db.RegisterMessage(Mutation)
-
-MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict(
- DESCRIPTOR = _MUTATIONRESULT,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult)
- ))
-_sym_db.RegisterMessage(MutationResult)
-
-ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict(
- DESCRIPTOR = _READOPTIONS,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions)
- ))
-_sym_db.RegisterMessage(ReadOptions)
-
-LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict(
- DESCRIPTOR = _LOOKUPREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest)
- ))
-_sym_db.RegisterMessage(LookupRequest)
-
-LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict(
- DESCRIPTOR = _LOOKUPRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse)
- ))
-_sym_db.RegisterMessage(LookupResponse)
-
-RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict(
- DESCRIPTOR = _RUNQUERYREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest)
- ))
-_sym_db.RegisterMessage(RunQueryRequest)
-
-RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict(
- DESCRIPTOR = _RUNQUERYRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse)
- ))
-_sym_db.RegisterMessage(RunQueryResponse)
-
-BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict(
- DESCRIPTOR = _BEGINTRANSACTIONREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest)
- ))
-_sym_db.RegisterMessage(BeginTransactionRequest)
-
-BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict(
- DESCRIPTOR = _BEGINTRANSACTIONRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse)
- ))
-_sym_db.RegisterMessage(BeginTransactionResponse)
-
-RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict(
- DESCRIPTOR = _ROLLBACKREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest)
- ))
-_sym_db.RegisterMessage(RollbackRequest)
-
-RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict(
- DESCRIPTOR = _ROLLBACKRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse)
- ))
-_sym_db.RegisterMessage(RollbackResponse)
-
-CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict(
- DESCRIPTOR = _COMMITREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest)
- ))
-_sym_db.RegisterMessage(CommitRequest)
-
-CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict(
- DESCRIPTOR = _COMMITRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse)
- ))
-_sym_db.RegisterMessage(CommitResponse)
-
-AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict(
- DESCRIPTOR = _ALLOCATEIDSREQUEST,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest)
- ))
-_sym_db.RegisterMessage(AllocateIdsRequest)
-
-AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict(
- DESCRIPTOR = _ALLOCATEIDSRESPONSE,
- __module__ = '_datastore_v1_pb2'
- # @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse)
- ))
-_sym_db.RegisterMessage(AllocateIdsResponse)
-
-
-DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n!com.google.api.services.datastore'))
-# @@protoc_insertion_point(module_scope)
diff --git a/gcloud/datastore/_generated/_datastore.proto b/gcloud/datastore/_generated/_datastore.proto
new file mode 100644
index 000000000000..6f6aedb39d8b
--- /dev/null
+++ b/gcloud/datastore/_generated/_datastore.proto
@@ -0,0 +1,289 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.datastore.v1beta3;
+
+import "google/api/annotations.proto";
+import "google/datastore/v1beta3/entity.proto";
+import "google/datastore/v1beta3/query.proto";
+
+option java_multiple_files = true;
+option java_outer_classname = "DatastoreProto";
+option java_package = "com.google.datastore.v1beta3";
+
+
+// Each RPC normalizes the partition IDs of the keys in its input entities,
+// and always returns entities with keys with normalized partition IDs.
+// This applies to all keys and entities, including those in values, except keys
+// with both an empty path and an empty or unset partition ID. Normalization of
+// input keys sets the project ID (if not already set) to the project ID from
+// the request.
+//
+service Datastore {
+ // Look up entities by key.
+ rpc Lookup(LookupRequest) returns (LookupResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:lookup" body: "*" };
+ }
+
+ // Query for entities.
+ rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:runQuery" body: "*" };
+ }
+
+ // Begin a new transaction.
+ rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:beginTransaction" body: "*" };
+ }
+
+ // Commit a transaction, optionally creating, deleting or modifying some
+ // entities.
+ rpc Commit(CommitRequest) returns (CommitResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:commit" body: "*" };
+ }
+
+ // Roll back a transaction.
+ rpc Rollback(RollbackRequest) returns (RollbackResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:rollback" body: "*" };
+ }
+
+ // Allocate IDs for the given keys (useful for referencing an entity before
+ // it is inserted).
+ rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
+ option (google.api.http) = { post: "/v1beta3/projects/{project_id}:allocateIds" body: "*" };
+ }
+}
+
+// The request for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
+message LookupRequest {
+ // Project ID against which to make the request.
+ string project_id = 8;
+
+ // Options for this lookup request.
+ ReadOptions read_options = 1;
+
+ // Keys of entities to look up.
+ repeated Key keys = 3;
+}
+
+// The response for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
+message LookupResponse {
+ // Entities found as `ResultType.FULL` entities. The order of results in this
+ // field is undefined and has no relation to the order of the keys in the
+ // input.
+ repeated EntityResult found = 1;
+
+ // Entities not found as `ResultType.KEY_ONLY` entities. The order of results
+ // in this field is undefined and has no relation to the order of the keys
+ // in the input.
+ repeated EntityResult missing = 2;
+
+ // A list of keys that were not looked up due to resource constraints. The
+ // order of results in this field is undefined and has no relation to the
+ // order of the keys in the input.
+ repeated Key deferred = 3;
+}
+
+// The request for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery].
+message RunQueryRequest {
+ // Project ID against which to make the request.
+ string project_id = 8;
+
+ // Entities are partitioned into subsets, identified by a partition ID.
+ // Queries are scoped to a single partition.
+ // This partition ID is normalized with the standard default context
+ // partition ID.
+ PartitionId partition_id = 2;
+
+ // The options for this query.
+ ReadOptions read_options = 1;
+
+ // The type of query.
+ oneof query_type {
+ // The query to run.
+ Query query = 3;
+
+ // The GQL query to run.
+ GqlQuery gql_query = 7;
+ }
+}
+
+// The response for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery].
+message RunQueryResponse {
+ // A batch of query results (always present).
+ QueryResultBatch batch = 1;
+
+ // The parsed form of the `GqlQuery` from the request, if it was set.
+ Query query = 2;
+}
+
+// The request for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
+message BeginTransactionRequest {
+ // Project ID against which to make the request.
+ string project_id = 8;
+}
+
+// The response for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
+message BeginTransactionResponse {
+ // The transaction identifier (always present).
+ bytes transaction = 1;
+}
+
+// The request for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback].
+message RollbackRequest {
+ // Project ID against which to make the request.
+ string project_id = 8;
+
+ // The transaction identifier, returned by a call to
+ // [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
+ bytes transaction = 1;
+}
+
+// The response for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback]
+// (an empty message).
+message RollbackResponse {
+
+}
+
+// The request for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit].
+message CommitRequest {
+ // Commit modes.
+ enum Mode {
+ // Unspecified.
+ MODE_UNSPECIFIED = 0;
+
+ // Transactional.
+ TRANSACTIONAL = 1;
+
+ // Non-transactional.
+ NON_TRANSACTIONAL = 2;
+ }
+
+ // Project ID against which to make the request.
+ string project_id = 8;
+
+ // The type of commit to perform. Defaults to `TRANSACTIONAL`.
+ Mode mode = 5;
+
+ // Must be set when mode is `TRANSACTIONAL`.
+ oneof transaction_selector {
+ // The transaction in which to write.
+ bytes transaction = 1;
+ }
+
+ // The mutations to perform.
+ //
+ // When mode is `TRANSACTIONAL`, mutations affecting a single entity are
+ // applied in order. The following sequences of mutations affecting a single
+ // entity are not permitted in a single `Commit` request:
+ // - `insert` followed by `insert`
+ // - `update` followed by `insert`
+ // - `upsert` followed by `insert`
+ // - `delete` followed by `update`
+ //
+ // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single
+ // entity.
+ repeated Mutation mutations = 6;
+}
+
+// The response for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit].
+message CommitResponse {
+ // The result of performing the mutations.
+ // The i-th mutation result corresponds to the i-th mutation in the request.
+ repeated MutationResult mutation_results = 3;
+
+ // The number of index entries updated during the commit.
+ int32 index_updates = 4;
+}
+
+// The request for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds].
+message AllocateIdsRequest {
+ // Project ID against which to make the request.
+ string project_id = 8;
+
+ // A list of keys with incomplete key paths for which to allocate IDs.
+ // No key may be reserved/read-only.
+ repeated Key keys = 1;
+}
+
+// The response for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds].
+message AllocateIdsResponse {
+ // The keys specified in the request (in the same order), each with
+ // its key path completed with a newly allocated ID.
+ repeated Key keys = 1;
+}
+
+// A mutation to apply to an entity.
+message Mutation {
+ // The mutation operation.
+ //
+ // For `insert`, `update`, and `upsert`:
+ // - The entity's key must not be reserved/read-only.
+ // - No property in the entity may have a reserved name,
+ // not even a property in an entity in a value.
+ // - No value in the entity may have meaning 18,
+ // not even a value in an entity in another value.
+ oneof operation {
+ // The entity to insert. The entity must not already exist.
+ // The entity's key's final path element may be incomplete.
+ Entity insert = 4;
+
+ // The entity to update. The entity must already exist.
+ // Must have a complete key path.
+ Entity update = 5;
+
+ // The entity to upsert. The entity may or may not already exist.
+ // The entity's key's final path element may be incomplete.
+ Entity upsert = 6;
+
+ // The key of the entity to delete. The entity may or may not already exist.
+ // Must have a complete key path and must not be reserved/read-only.
+ Key delete = 7;
+ }
+}
+
+// The result of applying a mutation.
+message MutationResult {
+ // The automatically allocated key.
+ // Set only when the mutation allocated a key.
+ Key key = 3;
+}
+
+// Options shared by read requests.
+message ReadOptions {
+ // Read consistencies.
+ enum ReadConsistency {
+ // Unspecified.
+ READ_CONSISTENCY_UNSPECIFIED = 0;
+
+ // Strong consistency.
+ STRONG = 1;
+
+ // Eventual consistency.
+ EVENTUAL = 2;
+ }
+
+ // If not specified, lookups and ancestor queries default to
+  // `read_consistency`=`STRONG`; global queries default to
+ // `read_consistency`=`EVENTUAL`.
+ oneof consistency_type {
+ // The non-transactional read consistency to use.
+ // Cannot be set to `STRONG` for global queries.
+ ReadConsistency read_consistency = 1;
+
+ // The transaction in which to read.
+ bytes transaction = 2;
+ }
+}
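
The `Datastore` service and the request/response messages above compile into ordinary protobuf Python classes. As a minimal sketch (the import name `datastore_pb2` and the literal project, kind, and ID values are hypothetical, not taken from this change), a v1beta3 lookup request could be assembled roughly like this:

    # Minimal sketch: the module name and the literal values are hypothetical.
    import datastore_pb2

    request = datastore_pb2.LookupRequest()
    request.project_id = 'my-project'
    # read_consistency is one branch of the ReadOptions consistency_type oneof.
    request.read_options.read_consistency = datastore_pb2.ReadOptions.EVENTUAL

    # keys is the repeated Key field; each Key carries a path of PathElements.
    key = request.keys.add()
    element = key.path.add()
    element.kind = 'Task'
    element.id = 1234

Setting `element.id` leaves the sibling `name` field unset, matching the `id_type` oneof in `Key.PathElement` defined in `_entity.proto` below.
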
diff --git a/gcloud/datastore/_generated/_entity.proto b/gcloud/datastore/_generated/_entity.proto
new file mode 100644
index 000000000000..12423eb419f6
--- /dev/null
+++ b/gcloud/datastore/_generated/_entity.proto
@@ -0,0 +1,196 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.datastore.v1beta3;
+
+import "google/api/annotations.proto";
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+import "google/type/latlng.proto";
+
+option java_multiple_files = true;
+option java_outer_classname = "EntityProto";
+option java_package = "com.google.datastore.v1beta3";
+
+
+// A partition ID identifies a grouping of entities. The grouping is always
+// by project and namespace; however, the namespace ID may be empty.
+//
+// A partition ID contains several dimensions:
+// project ID and namespace ID.
+// Partition dimensions:
+// - A dimension may be `""`.
+// - A dimension must be valid UTF-8 bytes.
+// - A dimension's value must match regex `[A-Za-z\d\.\-_]{1,100}`
+// If the value of any dimension matches regex `__.*__`, the partition is
+// reserved/read-only.
+// A reserved/read-only partition ID is forbidden in certain documented
+// contexts.
+//
+// Foreign partition IDs (in which the project ID does
+// not match the context project ID) are discouraged.
+// Reads and writes of foreign partition IDs may fail if the project is not in an active state.
+message PartitionId {
+ // Project ID.
+ string project_id = 2;
+
+ // Namespace ID.
+ string namespace_id = 4;
+}
+
+// A unique identifier for an entity.
+// If a key's partition id or any of its path kinds or names are
+// reserved/read-only, the key is reserved/read-only.
+// A reserved/read-only key is forbidden in certain documented contexts.
+message Key {
+ // A (kind, ID/name) pair used to construct a key path.
+ //
+  // If either name or ID is set, the element is complete.
+ // If neither is set, the element is incomplete.
+ message PathElement {
+ // The kind of the entity.
+ // A kind matching regex `__.*__` is reserved/read-only.
+ // A kind must not contain more than 1500 bytes when UTF-8 encoded.
+ // Cannot be `""`.
+ string kind = 1;
+
+ // The type of id.
+ oneof id_type {
+ // The auto allocated ID of the entity.
+ // Never equal to zero. Values less than zero are discouraged and may not
+ // be supported in the future.
+ int64 id = 2;
+
+ // The name of the entity.
+ // A name matching regex `__.*__` is reserved/read-only.
+ // A name must not be more than 1500 bytes when UTF-8 encoded.
+ // Cannot be `""`.
+ string name = 3;
+ }
+ }
+
+ // Entities are partitioned into subsets, currently identified by a dataset
+ // (usually implicitly specified by the project) and namespace ID.
+ // Queries are scoped to a single partition.
+ PartitionId partition_id = 1;
+
+ // The entity path.
+ // An entity path consists of one or more elements composed of a kind and a
+ // string or numerical identifier, which identify entities. The first
+ // element identifies a _root entity_, the second element identifies
+ // a _child_ of the root entity, the third element a child of the
+ // second entity, and so forth. The entities identified by all prefixes of
+ // the path are called the element's _ancestors_.
+ // An entity path is always fully complete: *all* of the entity's ancestors
+ // are required to be in the path along with the entity identifier itself.
+ // The only exception is that in some documented cases, the identifier in the
+ // last path element (for the entity) itself may be omitted. A path can never
+ // be empty. The path can have at most 100 elements.
+ repeated PathElement path = 2;
+}
+
+// An array value.
+message ArrayValue {
+ // Values in the array.
+ // The order of this array may not be preserved if it contains a mix of
+ // indexed and unindexed values.
+ repeated Value values = 1;
+}
+
+// A message that can hold any of the supported value types and associated
+// metadata.
+message Value {
+ // Must have a value set.
+ oneof value_type {
+ // A null value.
+ google.protobuf.NullValue null_value = 11;
+
+ // A boolean value.
+ bool boolean_value = 1;
+
+ // An integer value.
+ int64 integer_value = 2;
+
+ // A double value.
+ double double_value = 3;
+
+ // A timestamp value.
+ // When stored in the Datastore, precise only to microseconds;
+ // any additional precision is rounded down.
+ google.protobuf.Timestamp timestamp_value = 10;
+
+ // A key value.
+ Key key_value = 5;
+
+ // A UTF-8 encoded string value.
+ // When `exclude_from_indexes` is false (it is indexed) and meaning is not
+ // 2, may have at most 1500 bytes.
+ // When meaning is 2, may have at most 2083 bytes.
+ // Otherwise, may be set to at least 1,000,000 bytes.
+ string string_value = 17;
+
+ // A blob value.
+ // May have at most 1,000,000 bytes.
+ // When `exclude_from_indexes` is false, may have at most 1500 bytes.
+ // In JSON requests, must be base64-encoded.
+ bytes blob_value = 18;
+
+ // A geo point value representing a point on the surface of Earth.
+ google.type.LatLng geo_point_value = 8;
+
+ // An entity value.
+ // May have no key.
+ // May have a key with an incomplete key path.
+ // May have a reserved/read-only key.
+ Entity entity_value = 6;
+
+ // An array value.
+ // Cannot contain another array value.
+ // A `Value` instance that sets field `array_value` must not set fields
+ // `meaning` or `exclude_from_indexes`.
+ ArrayValue array_value = 9;
+ }
+
+ // The `meaning` field should only be populated for backwards compatibility.
+ int32 meaning = 14;
+
+ // If the value should be excluded from all indexes including those defined
+ // explicitly.
+ bool exclude_from_indexes = 19;
+}
+
+// An entity.
+//
+// An entity is limited to 1 megabyte when stored. That _roughly_
+// corresponds to a limit of 1 megabyte for the serialized form of this
+// message.
+message Entity {
+ // The entity's key.
+ //
+ // An entity must have a key, unless otherwise documented (for example,
+ // an entity in `Value.entity_value` may have no key).
+ // An entity's kind is its key's path's last element's kind,
+ // or null if it has no key.
+ Key key = 1;
+
+ // The entity's properties.
+ // The map's keys are property names.
+ // A property name matching regex `__.*__` is reserved.
+ // A reserved property name is forbidden in certain documented contexts.
+ // The name must not contain more than 500 characters.
+ // The name cannot be `""`.
+ map<string, Value> properties = 3;
+}
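
As a rough usage sketch (the project ID, kinds, and property names below are
made up), the `Key` path and the string-keyed `properties` map defined above
translate to the generated `entity_pb2` classes roughly as follows:

    from gcloud.datastore._generated import entity_pb2

    key = entity_pb2.Key()
    key.partition_id.project_id = 'my-project'

    parent = key.path.add()        # root element of the path
    parent.kind = 'Parent'
    parent.name = 'alice'

    child = key.path.add()         # incomplete: neither id nor name set
    child.kind = 'Child'

    entity = entity_pb2.Entity()
    entity.key.CopyFrom(key)
    entity.properties['done'].boolean_value = False
    entity.properties['title'].string_value = 'write docs'
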
diff --git a/gcloud/datastore/_generated/_query.proto b/gcloud/datastore/_generated/_query.proto
new file mode 100644
index 000000000000..80cbb2045ebc
--- /dev/null
+++ b/gcloud/datastore/_generated/_query.proto
@@ -0,0 +1,281 @@
+// Copyright (c) 2015, Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.datastore.v1beta3;
+
+import "google/api/annotations.proto";
+import "google/datastore/v1beta3/entity.proto";
+import "google/protobuf/wrappers.proto";
+
+option java_multiple_files = true;
+option java_outer_classname = "QueryProto";
+option java_package = "com.google.datastore.v1beta3";
+
+
+// The result of fetching an entity from the datastore.
+message EntityResult {
+ // Specifies what data the 'entity' field contains.
+ // A `ResultType` is either implied (for example, in `LookupResponse.found`
+ // from `datastore.proto`, it is always `FULL`) or specified by context (for
+ // example, in message `QueryResultBatch`, field `entity_result_type`
+ // specifies a `ResultType` for all the values in field `entity_results`).
+ enum ResultType {
+ // Unspecified.
+ RESULT_TYPE_UNSPECIFIED = 0;
+
+ // The entire entity.
+ FULL = 1;
+
+ // A projected subset of properties. The entity may have no key. A property
+ // value may have meaning 18.
+ PROJECTION = 2;
+
+ // Only the key.
+ KEY_ONLY = 3;
+ }
+
+ // The resulting entity.
+ Entity entity = 1;
+
+ // A cursor that points to the position after the result entity.
+ // Set only when the `EntityResult` is part of a `QueryResultBatch` message.
+ bytes cursor = 3;
+}
+
+// A query.
+message Query {
+ // The projection to return. Defaults to returning all properties.
+ repeated Projection projection = 2;
+
+ // The kinds to query (if empty, returns entities of all kinds).
+ // Currently at most 1 kind may be specified.
+ repeated KindExpression kind = 3;
+
+ // The filter to apply.
+ Filter filter = 4;
+
+ // The order to apply to the query results (if empty, order is unspecified).
+ repeated PropertyOrder order = 5;
+
+ // The properties to make distinct. The query results will contain the first
+ // result for each distinct combination of values for the given properties
+ // (if empty, all results are returned).
+ repeated PropertyReference distinct_on = 6;
+
+ // A starting point for the query results. Query cursors are
+ // returned in query result batches.
+ bytes start_cursor = 7;
+
+ // An ending point for the query results. Query cursors are
+ // returned in query result batches.
+ bytes end_cursor = 8;
+
+ // The number of results to skip. Applies before limit, but after all other
+ // constraints.
+ // Must be >= 0.
+ int32 offset = 10;
+
+ // The maximum number of results to return. Applies after all other
+ // constraints.
+ // Unspecified is interpreted as no limit.
+ // Must be >= 0.
+ google.protobuf.Int32Value limit = 12;
+}
+
+// A representation of a kind.
+message KindExpression {
+ // The name of the kind.
+ string name = 1;
+}
+
+// A reference to a property relative to the kind expressions.
+message PropertyReference {
+ // The name of the property.
+ string name = 2;
+}
+
+// A representation of a property in a projection.
+message Projection {
+ // The property to project.
+ PropertyReference property = 1;
+}
+
+// The desired order for a specific property.
+message PropertyOrder {
+ // Direction.
+ enum Direction {
+ // Unspecified.
+ DIRECTION_UNSPECIFIED = 0;
+
+ // Ascending.
+ ASCENDING = 1;
+
+ // Descending.
+ DESCENDING = 2;
+ }
+
+ // The property to order by.
+ PropertyReference property = 1;
+
+ // The direction to order by. Defaults to `ASCENDING`.
+ Direction direction = 2;
+}
+
+// A holder for any type of filter.
+message Filter {
+ // The type of filter.
+ oneof filter_type {
+ // A composite filter.
+ CompositeFilter composite_filter = 1;
+
+ // A filter on a property.
+ PropertyFilter property_filter = 2;
+ }
+}
+
+// A filter that merges multiple other filters using the given operator.
+message CompositeFilter {
+ // Composite filter operator.
+ enum Operator {
+ // Unspecified. This value must not be used.
+ OPERATOR_UNSPECIFIED = 0;
+
+ // And.
+ AND = 1;
+ }
+
+ // The operator for combining multiple filters.
+ Operator op = 1;
+
+ // The list of filters to combine.
+ // Must contain at least one filter.
+ repeated Filter filters = 2;
+}
+
+// A filter on a specific property.
+message PropertyFilter {
+ // Property filter operator.
+ enum Operator {
+ // Unspecified. This value must not be used.
+ OPERATOR_UNSPECIFIED = 0;
+
+ // Less than.
+ LESS_THAN = 1;
+
+ // Less than or equal.
+ LESS_THAN_OR_EQUAL = 2;
+
+ // Greater than.
+ GREATER_THAN = 3;
+
+ // Greater than or equal.
+ GREATER_THAN_OR_EQUAL = 4;
+
+ // Equal.
+ EQUAL = 5;
+
+ // Has ancestor.
+ HAS_ANCESTOR = 11;
+ }
+
+ // The property to filter by.
+ PropertyReference property = 1;
+
+ // The operator to filter by.
+ Operator op = 2;
+
+ // The value to compare the property to.
+ Value value = 3;
+}
+
+// A GQL query.
+message GqlQuery {
+ // A string of the format described
+ // [here](https://developers.google.com/datastore/docs/concepts/gql).
+ string query_string = 1;
+
+ // When false, the query string must not contain any literals and instead
+ // must bind all values. For example,
+ // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while
+ // `SELECT * FROM Kind WHERE a = @value` is.
+ bool allow_literals = 2;
+
+ // For each non-reserved named binding site in the query string,
+ // there must be a named parameter with that name,
+ // but not necessarily the inverse.
+ // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex
+ // `__.*__`, and must not be `""`.
+ map<string, GqlQueryParameter> named_bindings = 5;
+
+ // Numbered binding site @1 references the first numbered parameter,
+ // effectively using 1-based indexing, rather than the usual 0.
+ // For each binding site numbered i in `query_string`,
+ // there must be an i-th numbered parameter.
+ // The inverse must also be true.
+ repeated GqlQueryParameter positional_bindings = 4;
+}
+
+// A binding parameter for a GQL query.
+message GqlQueryParameter {
+ // The type of parameter.
+ oneof parameter_type {
+ // Value.
+ Value value = 2;
+
+ // Cursor.
+ bytes cursor = 3;
+ }
+}
+
+// A batch of results produced by a query.
+message QueryResultBatch {
+ // The possible values for the `more_results` field.
+ enum MoreResultsType {
+ // Unspecified. This value is never used.
+ MORE_RESULTS_TYPE_UNSPECIFIED = 0;
+
+ // There may be additional batches to fetch from this query.
+ NOT_FINISHED = 1;
+
+ // The query is finished, but there may be more results after the limit.
+ MORE_RESULTS_AFTER_LIMIT = 2;
+
+ // The query is finished, but there may be more results after the end cursor.
+ MORE_RESULTS_AFTER_CURSOR = 4;
+
+ // The query has been exhausted.
+ NO_MORE_RESULTS = 3;
+ }
+
+ // The number of results skipped, typically because of an offset.
+ int32 skipped_results = 6;
+
+ // A cursor that points to the position after the last skipped result.
+ // Will be set when `skipped_results` != 0.
+ bytes skipped_cursor = 3;
+
+ // The result type for every entity in `entity_results`.
+ EntityResult.ResultType entity_result_type = 1;
+
+ // The results for this batch.
+ repeated EntityResult entity_results = 2;
+
+ // A cursor that points to the position after the last result in the batch.
+ bytes end_cursor = 4;
+
+ // The state of the query after the current batch.
+ MoreResultsType more_results = 5;
+}
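
A similarly hedged sketch of assembling a `Query` from these messages,
assuming the generated module is importable as `query_pb2` (the kind,
property names, and limit are illustrative only):

    from gcloud.datastore._generated import query_pb2

    query = query_pb2.Query()
    query.kind.add().name = 'Task'            # at most one kind may be given

    # Single property filter: done = false.
    prop_filter = query.filter.property_filter
    prop_filter.property.name = 'done'
    prop_filter.op = query_pb2.PropertyFilter.EQUAL
    prop_filter.value.boolean_value = False

    # Order by priority descending; return at most 10 results.
    order = query.order.add()
    order.property.name = 'priority'
    order.direction = query_pb2.PropertyOrder.DESCENDING
    query.limit.value = 10                    # google.protobuf.Int32Value
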
diff --git a/gcloud/datastore/_generated/datastore_grpc_pb2.py b/gcloud/datastore/_generated/datastore_grpc_pb2.py
new file mode 100644
index 000000000000..5e648344259e
--- /dev/null
+++ b/gcloud/datastore/_generated/datastore_grpc_pb2.py
@@ -0,0 +1,279 @@
+import abc
+from grpc.beta import implementations as beta_implementations
+from grpc.early_adopter import implementations as early_adopter_implementations
+from grpc.framework.alpha import utilities as alpha_utilities
+from grpc.framework.common import cardinality
+from grpc.framework.interfaces.face import utilities as face_utilities
+class EarlyAdopterDatastoreServicer(object):
+ """"""
+ __metaclass__ = abc.ABCMeta
+ @abc.abstractmethod
+ def Lookup(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def RunQuery(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def BeginTransaction(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def Commit(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def Rollback(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def AllocateIds(self, request, context):
+ raise NotImplementedError()
+class EarlyAdopterDatastoreServer(object):
+ """"""
+ __metaclass__ = abc.ABCMeta
+ @abc.abstractmethod
+ def start(self):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def stop(self):
+ raise NotImplementedError()
+class EarlyAdopterDatastoreStub(object):
+ """"""
+ __metaclass__ = abc.ABCMeta
+ @abc.abstractmethod
+ def Lookup(self, request):
+ raise NotImplementedError()
+ Lookup.async = None
+ @abc.abstractmethod
+ def RunQuery(self, request):
+ raise NotImplementedError()
+ RunQuery.async = None
+ @abc.abstractmethod
+ def BeginTransaction(self, request):
+ raise NotImplementedError()
+ BeginTransaction.async = None
+ @abc.abstractmethod
+ def Commit(self, request):
+ raise NotImplementedError()
+ Commit.async = None
+ @abc.abstractmethod
+ def Rollback(self, request):
+ raise NotImplementedError()
+ Rollback.async = None
+ @abc.abstractmethod
+ def AllocateIds(self, request):
+ raise NotImplementedError()
+ AllocateIds.async = None
+def early_adopter_create_Datastore_server(servicer, port, private_key=None, certificate_chain=None):
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ method_service_descriptions = {
+ "AllocateIds": alpha_utilities.unary_unary_service_description(
+ servicer.AllocateIds,
+ gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.SerializeToString,
+ ),
+ "BeginTransaction": alpha_utilities.unary_unary_service_description(
+ servicer.BeginTransaction,
+ gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.SerializeToString,
+ ),
+ "Commit": alpha_utilities.unary_unary_service_description(
+ servicer.Commit,
+ gcloud.datastore._generated.datastore_pb2.CommitRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.CommitResponse.SerializeToString,
+ ),
+ "Lookup": alpha_utilities.unary_unary_service_description(
+ servicer.Lookup,
+ gcloud.datastore._generated.datastore_pb2.LookupRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.LookupResponse.SerializeToString,
+ ),
+ "Rollback": alpha_utilities.unary_unary_service_description(
+ servicer.Rollback,
+ gcloud.datastore._generated.datastore_pb2.RollbackRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.RollbackResponse.SerializeToString,
+ ),
+ "RunQuery": alpha_utilities.unary_unary_service_description(
+ servicer.RunQuery,
+ gcloud.datastore._generated.datastore_pb2.RunQueryRequest.FromString,
+ gcloud.datastore._generated.datastore_pb2.RunQueryResponse.SerializeToString,
+ ),
+ }
+ return early_adopter_implementations.server("google.datastore.v1beta3.Datastore", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
+def early_adopter_create_Datastore_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ method_invocation_descriptions = {
+ "AllocateIds": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.FromString,
+ ),
+ "BeginTransaction": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.FromString,
+ ),
+ "Commit": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.CommitRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.CommitResponse.FromString,
+ ),
+ "Lookup": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.LookupRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.LookupResponse.FromString,
+ ),
+ "Rollback": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.RollbackRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.RollbackResponse.FromString,
+ ),
+ "RunQuery": alpha_utilities.unary_unary_invocation_description(
+ gcloud.datastore._generated.datastore_pb2.RunQueryRequest.SerializeToString,
+ gcloud.datastore._generated.datastore_pb2.RunQueryResponse.FromString,
+ ),
+ }
+ return early_adopter_implementations.stub("google.datastore.v1beta3.Datastore", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
+
+class BetaDatastoreServicer(object):
+ """"""
+ __metaclass__ = abc.ABCMeta
+ @abc.abstractmethod
+ def Lookup(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def RunQuery(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def BeginTransaction(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def Commit(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def Rollback(self, request, context):
+ raise NotImplementedError()
+ @abc.abstractmethod
+ def AllocateIds(self, request, context):
+ raise NotImplementedError()
+
+class BetaDatastoreStub(object):
+ """The interface to which stubs will conform."""
+ __metaclass__ = abc.ABCMeta
+ @abc.abstractmethod
+ def Lookup(self, request, timeout):
+ raise NotImplementedError()
+ Lookup.future = None
+ @abc.abstractmethod
+ def RunQuery(self, request, timeout):
+ raise NotImplementedError()
+ RunQuery.future = None
+ @abc.abstractmethod
+ def BeginTransaction(self, request, timeout):
+ raise NotImplementedError()
+ BeginTransaction.future = None
+ @abc.abstractmethod
+ def Commit(self, request, timeout):
+ raise NotImplementedError()
+ Commit.future = None
+ @abc.abstractmethod
+ def Rollback(self, request, timeout):
+ raise NotImplementedError()
+ Rollback.future = None
+ @abc.abstractmethod
+ def AllocateIds(self, request, timeout):
+ raise NotImplementedError()
+ AllocateIds.future = None
+
+def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ request_deserializers = {
+ ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.FromString,
+ ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitRequest.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupRequest.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackRequest.FromString,
+ ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryRequest.FromString,
+ }
+ response_serializers = {
+ ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitResponse.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupResponse.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackResponse.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryResponse.SerializeToString,
+ }
+ method_implementations = {
+ ('google.datastore.v1beta3.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds),
+ ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction),
+ ('google.datastore.v1beta3.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit),
+ ('google.datastore.v1beta3.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup),
+ ('google.datastore.v1beta3.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback),
+ ('google.datastore.v1beta3.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery),
+ }
+ server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
+ return beta_implementations.server(method_implementations, options=server_options)
+
+def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ import gcloud.datastore._generated.datastore_pb2
+ request_serializers = {
+ ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsRequest.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionRequest.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitRequest.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupRequest.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackRequest.SerializeToString,
+ ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryRequest.SerializeToString,
+ }
+ response_deserializers = {
+ ('google.datastore.v1beta3.Datastore', 'AllocateIds'): gcloud.datastore._generated.datastore_pb2.AllocateIdsResponse.FromString,
+ ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): gcloud.datastore._generated.datastore_pb2.BeginTransactionResponse.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Commit'): gcloud.datastore._generated.datastore_pb2.CommitResponse.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Lookup'): gcloud.datastore._generated.datastore_pb2.LookupResponse.FromString,
+ ('google.datastore.v1beta3.Datastore', 'Rollback'): gcloud.datastore._generated.datastore_pb2.RollbackResponse.FromString,
+ ('google.datastore.v1beta3.Datastore', 'RunQuery'): gcloud.datastore._generated.datastore_pb2.RunQueryResponse.FromString,
+ }
+ cardinalities = {
+ 'AllocateIds': cardinality.Cardinality.UNARY_UNARY,
+ 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY,
+ 'Commit': cardinality.Cardinality.UNARY_UNARY,
+ 'Lookup': cardinality.Cardinality.UNARY_UNARY,
+ 'Rollback': cardinality.Cardinality.UNARY_UNARY,
+ 'RunQuery': cardinality.Cardinality.UNARY_UNARY,
+ }
+ stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
+ return beta_implementations.dynamic_stub(channel, 'google.datastore.v1beta3.Datastore', cardinalities, options=stub_options)
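
For orientation only, a hedged sketch of wiring the beta stub above to a
channel and issuing a Lookup; the host, port, project ID, and key values are
placeholders (in practice the library's connection layer owns this wiring):

    from grpc.beta import implementations

    from gcloud.datastore._generated import datastore_grpc_pb2
    from gcloud.datastore._generated import datastore_pb2

    channel = implementations.insecure_channel('localhost', 8080)
    stub = datastore_grpc_pb2.beta_create_Datastore_stub(channel)

    request = datastore_pb2.LookupRequest(project_id='my-project')
    key = request.keys.add()
    key.path.add().kind = 'Task'
    key.path[0].name = 'sample-task'

    # BetaDatastoreStub methods take (request, timeout); timeout in seconds.
    response = stub.Lookup(request, 10)
    found_entities = [result.entity for result in response.found]
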
diff --git a/gcloud/datastore/_generated/datastore_pb2.py b/gcloud/datastore/_generated/datastore_pb2.py
index 398146391c2f..ffba033868c0 100644
--- a/gcloud/datastore/_generated/datastore_pb2.py
+++ b/gcloud/datastore/_generated/datastore_pb2.py
@@ -1,37 +1,862 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Datastore shim to emulate v1beta3 module structure.
-
-This module intended to pair with datastore.proto.
-"""
-
-from gcloud.datastore import _datastore_v1_pb2
-
-
-LookupRequest = _datastore_v1_pb2.LookupRequest
-LookupResponse = _datastore_v1_pb2.LookupResponse
-RunQueryRequest = _datastore_v1_pb2.RunQueryRequest
-RunQueryResponse = _datastore_v1_pb2.RunQueryResponse
-BeginTransactionRequest = _datastore_v1_pb2.BeginTransactionRequest
-BeginTransactionResponse = _datastore_v1_pb2.BeginTransactionResponse
-RollbackRequest = _datastore_v1_pb2.RollbackRequest
-RollbackResponse = _datastore_v1_pb2.RollbackResponse
-CommitRequest = _datastore_v1_pb2.CommitRequest
-CommitResponse = _datastore_v1_pb2.CommitResponse
-AllocateIdsRequest = _datastore_v1_pb2.AllocateIdsRequest
-AllocateIdsResponse = _datastore_v1_pb2.AllocateIdsResponse
-Mutation = _datastore_v1_pb2.Mutation
-MutationResult = _datastore_v1_pb2.MutationResult
-ReadOptions = _datastore_v1_pb2.ReadOptions
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/datastore/v1beta3/datastore.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2
+from gcloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1beta3_dot_query__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='google/datastore/v1beta3/datastore.proto',
+ package='google.datastore.v1beta3',
+ syntax='proto3',
+ serialized_pb=b'\n(google/datastore/v1beta3/datastore.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a$google/datastore/v1beta3/query.proto\"\x8d\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12+\n\x04keys\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xb1\x01\n\x0eLookupResponse\x12\x35\n\x05\x66ound\x18\x01 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x37\n\x07missing\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12/\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\x98\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cpartition_id\x18\x02 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12\x30\n\x05query\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.QueryH\x00\x12\x37\n\tgql_query\x18\x07 \x01(\x0b\x32\".google.datastore.v1beta3.GqlQueryH\x00\x42\x0c\n\nquery_type\"}\n\x10RunQueryResponse\x12\x39\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32*.google.datastore.v1beta3.QueryResultBatch\x12.\n\x05query\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x8d\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12:\n\x04mode\x18\x05 \x01(\x0e\x32,.google.datastore.v1beta3.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x35\n\tmutations\x18\x06 \x03(\x0b\x32\".google.datastore.v1beta3.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"k\n\x0e\x43ommitResponse\x12\x42\n\x10mutation_results\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"U\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"B\n\x13\x41llocateIdsResponse\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xe4\x01\n\x08Mutation\x12\x32\n\x06insert\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06update\x18\x05 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06upsert\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12/\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x42\x0b\n\toperation\"<\n\x0eMutationResult\x12*\n\x03key\x18\x03 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xda\x01\n\x0bReadOptions\x12Q\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x35.google.datastore.v1beta3.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 
\n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xb7\x07\n\tDatastore\x12\x8d\x01\n\x06Lookup\x12\'.google.datastore.v1beta3.LookupRequest\x1a(.google.datastore.v1beta3.LookupResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:lookup:\x01*\x12\x95\x01\n\x08RunQuery\x12).google.datastore.v1beta3.RunQueryRequest\x1a*.google.datastore.v1beta3.RunQueryResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:runQuery:\x01*\x12\xb5\x01\n\x10\x42\x65ginTransaction\x12\x31.google.datastore.v1beta3.BeginTransactionRequest\x1a\x32.google.datastore.v1beta3.BeginTransactionResponse\":\x82\xd3\xe4\x93\x02\x34\"//v1beta3/projects/{project_id}:beginTransaction:\x01*\x12\x8d\x01\n\x06\x43ommit\x12\'.google.datastore.v1beta3.CommitRequest\x1a(.google.datastore.v1beta3.CommitResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:commit:\x01*\x12\x95\x01\n\x08Rollback\x12).google.datastore.v1beta3.RollbackRequest\x1a*.google.datastore.v1beta3.RollbackResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:rollback:\x01*\x12\xa1\x01\n\x0b\x41llocateIds\x12,.google.datastore.v1beta3.AllocateIdsRequest\x1a-.google.datastore.v1beta3.AllocateIdsResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1beta3/projects/{project_id}:allocateIds:\x01*B0\n\x1c\x63om.google.datastore.v1beta3B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3'
+ ,
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_query__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_COMMITREQUEST_MODE = _descriptor.EnumDescriptor(
+ name='Mode',
+ full_name='google.datastore.v1beta3.CommitRequest.Mode',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='MODE_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TRANSACTIONAL', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='NON_TRANSACTIONAL', index=2, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1263,
+ serialized_end=1333,
+)
+_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE)
+
+_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor(
+ name='ReadConsistency',
+ full_name='google.datastore.v1beta3.ReadOptions.ReadConsistency',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='STRONG', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EVENTUAL', index=2, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=2038,
+ serialized_end=2115,
+)
+_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY)
+
+
+_LOOKUPREQUEST = _descriptor.Descriptor(
+ name='LookupRequest',
+ full_name='google.datastore.v1beta3.LookupRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.LookupRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='read_options', full_name='google.datastore.v1beta3.LookupRequest.read_options', index=1,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='keys', full_name='google.datastore.v1beta3.LookupRequest.keys', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=178,
+ serialized_end=319,
+)
+
+
+_LOOKUPRESPONSE = _descriptor.Descriptor(
+ name='LookupResponse',
+ full_name='google.datastore.v1beta3.LookupResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='found', full_name='google.datastore.v1beta3.LookupResponse.found', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='missing', full_name='google.datastore.v1beta3.LookupResponse.missing', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='deferred', full_name='google.datastore.v1beta3.LookupResponse.deferred', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=322,
+ serialized_end=499,
+)
+
+
+_RUNQUERYREQUEST = _descriptor.Descriptor(
+ name='RunQueryRequest',
+ full_name='google.datastore.v1beta3.RunQueryRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.RunQueryRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='partition_id', full_name='google.datastore.v1beta3.RunQueryRequest.partition_id', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='read_options', full_name='google.datastore.v1beta3.RunQueryRequest.read_options', index=2,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='query', full_name='google.datastore.v1beta3.RunQueryRequest.query', index=3,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='gql_query', full_name='google.datastore.v1beta3.RunQueryRequest.gql_query', index=4,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='query_type', full_name='google.datastore.v1beta3.RunQueryRequest.query_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=502,
+ serialized_end=782,
+)
+
+
+_RUNQUERYRESPONSE = _descriptor.Descriptor(
+ name='RunQueryResponse',
+ full_name='google.datastore.v1beta3.RunQueryResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='batch', full_name='google.datastore.v1beta3.RunQueryResponse.batch', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='query', full_name='google.datastore.v1beta3.RunQueryResponse.query', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=784,
+ serialized_end=909,
+)
+
+
+_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor(
+ name='BeginTransactionRequest',
+ full_name='google.datastore.v1beta3.BeginTransactionRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.BeginTransactionRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=911,
+ serialized_end=956,
+)
+
+
+_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor(
+ name='BeginTransactionResponse',
+ full_name='google.datastore.v1beta3.BeginTransactionResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='transaction', full_name='google.datastore.v1beta3.BeginTransactionResponse.transaction', index=0,
+ number=1, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=958,
+ serialized_end=1005,
+)
+
+
+_ROLLBACKREQUEST = _descriptor.Descriptor(
+ name='RollbackRequest',
+ full_name='google.datastore.v1beta3.RollbackRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.RollbackRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='transaction', full_name='google.datastore.v1beta3.RollbackRequest.transaction', index=1,
+ number=1, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1007,
+ serialized_end=1065,
+)
+
+
+_ROLLBACKRESPONSE = _descriptor.Descriptor(
+ name='RollbackResponse',
+ full_name='google.datastore.v1beta3.RollbackResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1067,
+ serialized_end=1085,
+)
+
+
+_COMMITREQUEST = _descriptor.Descriptor(
+ name='CommitRequest',
+ full_name='google.datastore.v1beta3.CommitRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.CommitRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='mode', full_name='google.datastore.v1beta3.CommitRequest.mode', index=1,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='transaction', full_name='google.datastore.v1beta3.CommitRequest.transaction', index=2,
+ number=1, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='mutations', full_name='google.datastore.v1beta3.CommitRequest.mutations', index=3,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _COMMITREQUEST_MODE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='transaction_selector', full_name='google.datastore.v1beta3.CommitRequest.transaction_selector',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1088,
+ serialized_end=1357,
+)
+
+
+_COMMITRESPONSE = _descriptor.Descriptor(
+ name='CommitResponse',
+ full_name='google.datastore.v1beta3.CommitResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='mutation_results', full_name='google.datastore.v1beta3.CommitResponse.mutation_results', index=0,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='index_updates', full_name='google.datastore.v1beta3.CommitResponse.index_updates', index=1,
+ number=4, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1359,
+ serialized_end=1466,
+)
+
+
+_ALLOCATEIDSREQUEST = _descriptor.Descriptor(
+ name='AllocateIdsRequest',
+ full_name='google.datastore.v1beta3.AllocateIdsRequest',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.AllocateIdsRequest.project_id', index=0,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='keys', full_name='google.datastore.v1beta3.AllocateIdsRequest.keys', index=1,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1468,
+ serialized_end=1553,
+)
+
+
+_ALLOCATEIDSRESPONSE = _descriptor.Descriptor(
+ name='AllocateIdsResponse',
+ full_name='google.datastore.v1beta3.AllocateIdsResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='keys', full_name='google.datastore.v1beta3.AllocateIdsResponse.keys', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1555,
+ serialized_end=1621,
+)
+
+
+_MUTATION = _descriptor.Descriptor(
+ name='Mutation',
+ full_name='google.datastore.v1beta3.Mutation',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='insert', full_name='google.datastore.v1beta3.Mutation.insert', index=0,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='update', full_name='google.datastore.v1beta3.Mutation.update', index=1,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='upsert', full_name='google.datastore.v1beta3.Mutation.upsert', index=2,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='delete', full_name='google.datastore.v1beta3.Mutation.delete', index=3,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='operation', full_name='google.datastore.v1beta3.Mutation.operation',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1624,
+ serialized_end=1852,
+)
+
+
+_MUTATIONRESULT = _descriptor.Descriptor(
+ name='MutationResult',
+ full_name='google.datastore.v1beta3.MutationResult',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='google.datastore.v1beta3.MutationResult.key', index=0,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1854,
+ serialized_end=1914,
+)
+
+
+_READOPTIONS = _descriptor.Descriptor(
+ name='ReadOptions',
+ full_name='google.datastore.v1beta3.ReadOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='read_consistency', full_name='google.datastore.v1beta3.ReadOptions.read_consistency', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='transaction', full_name='google.datastore.v1beta3.ReadOptions.transaction', index=1,
+ number=2, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _READOPTIONS_READCONSISTENCY,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='consistency_type', full_name='google.datastore.v1beta3.ReadOptions.consistency_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1917,
+ serialized_end=2135,
+)
+
+_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
+_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT
+_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT
+_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._PARTITIONID
+_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
+_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY
+_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._GQLQUERY
+_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append(
+ _RUNQUERYREQUEST.fields_by_name['query'])
+_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type']
+_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append(
+ _RUNQUERYREQUEST.fields_by_name['gql_query'])
+_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type']
+_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERYRESULTBATCH
+_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY
+_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE
+_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION
+_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST
+_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append(
+ _COMMITREQUEST.fields_by_name['transaction'])
+_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector']
+_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT
+_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
+_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
+_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
+_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_MUTATION.oneofs_by_name['operation'].fields.append(
+ _MUTATION.fields_by_name['insert'])
+_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation']
+_MUTATION.oneofs_by_name['operation'].fields.append(
+ _MUTATION.fields_by_name['update'])
+_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation']
+_MUTATION.oneofs_by_name['operation'].fields.append(
+ _MUTATION.fields_by_name['upsert'])
+_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation']
+_MUTATION.oneofs_by_name['operation'].fields.append(
+ _MUTATION.fields_by_name['delete'])
+_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation']
+_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
+_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY
+_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS
+_READOPTIONS.oneofs_by_name['consistency_type'].fields.append(
+ _READOPTIONS.fields_by_name['read_consistency'])
+_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type']
+_READOPTIONS.oneofs_by_name['consistency_type'].fields.append(
+ _READOPTIONS.fields_by_name['transaction'])
+_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type']
+DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST
+DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE
+DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST
+DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE
+DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST
+DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST
+DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE
+DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST
+DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE
+DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST
+DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE
+DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION
+DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT
+DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS
+
+LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict(
+ DESCRIPTOR = _LOOKUPREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupRequest)
+ ))
+_sym_db.RegisterMessage(LookupRequest)
+
+LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict(
+ DESCRIPTOR = _LOOKUPRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupResponse)
+ ))
+_sym_db.RegisterMessage(LookupResponse)
+
+RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict(
+ DESCRIPTOR = _RUNQUERYREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryRequest)
+ ))
+_sym_db.RegisterMessage(RunQueryRequest)
+
+RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict(
+ DESCRIPTOR = _RUNQUERYRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryResponse)
+ ))
+_sym_db.RegisterMessage(RunQueryResponse)
+
+BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict(
+ DESCRIPTOR = _BEGINTRANSACTIONREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionRequest)
+ ))
+_sym_db.RegisterMessage(BeginTransactionRequest)
+
+BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict(
+ DESCRIPTOR = _BEGINTRANSACTIONRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionResponse)
+ ))
+_sym_db.RegisterMessage(BeginTransactionResponse)
+
+RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict(
+ DESCRIPTOR = _ROLLBACKREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackRequest)
+ ))
+_sym_db.RegisterMessage(RollbackRequest)
+
+RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict(
+ DESCRIPTOR = _ROLLBACKRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackResponse)
+ ))
+_sym_db.RegisterMessage(RollbackResponse)
+
+CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict(
+ DESCRIPTOR = _COMMITREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitRequest)
+ ))
+_sym_db.RegisterMessage(CommitRequest)
+
+CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict(
+ DESCRIPTOR = _COMMITRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitResponse)
+ ))
+_sym_db.RegisterMessage(CommitResponse)
+
+AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict(
+ DESCRIPTOR = _ALLOCATEIDSREQUEST,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsRequest)
+ ))
+_sym_db.RegisterMessage(AllocateIdsRequest)
+
+AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict(
+ DESCRIPTOR = _ALLOCATEIDSRESPONSE,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsResponse)
+ ))
+_sym_db.RegisterMessage(AllocateIdsResponse)
+
+Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict(
+ DESCRIPTOR = _MUTATION,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Mutation)
+ ))
+_sym_db.RegisterMessage(Mutation)
+
+MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict(
+ DESCRIPTOR = _MUTATIONRESULT,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.MutationResult)
+ ))
+_sym_db.RegisterMessage(MutationResult)
+
+ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict(
+ DESCRIPTOR = _READOPTIONS,
+ __module__ = 'google.datastore.v1beta3.datastore_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ReadOptions)
+ ))
+_sym_db.RegisterMessage(ReadOptions)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\016DatastoreProtoP\001')
+# @@protoc_insertion_point(module_scope)
diff --git a/gcloud/datastore/_generated/entity_pb2.py b/gcloud/datastore/_generated/entity_pb2.py
index 4c071ac38de1..3295047f731f 100644
--- a/gcloud/datastore/_generated/entity_pb2.py
+++ b/gcloud/datastore/_generated/entity_pb2.py
@@ -1,26 +1,493 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Datastore shim to emulate v1beta3 module structure.
-
-This module intended to pair with entity.proto.
-"""
-
-from gcloud.datastore import _datastore_v1_pb2
-
-
-PartitionId = _datastore_v1_pb2.PartitionId
-Key = _datastore_v1_pb2.Key
-Value = _datastore_v1_pb2.Value
-Entity = _datastore_v1_pb2.Entity
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/datastore/v1beta3/entity.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='google/datastore/v1beta3/entity.proto',
+ package='google.datastore.v1beta3',
+ syntax='proto3',
+ serialized_pb=b'\n%google/datastore/v1beta3/entity.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xc1\x01\n\x03Key\x12;\n\x0cpartition_id\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12\x37\n\x04path\x18\x02 \x03(\x0b\x32).google.datastore.v1beta3.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x80\x04\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x32\n\tkey_value\x18\x05 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x38\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.datastore.v1beta3.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xce\x01\n\x06\x45ntity\x12*\n\x03key\x18\x01 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\x12\x44\n\nproperties\x18\x03 \x03(\x0b\x32\x30.google.datastore.v1beta3.Entity.PropertiesEntry\x1aR\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Value:\x02\x38\x01\x42-\n\x1c\x63om.google.datastore.v1beta3B\x0b\x45ntityProtoP\x01\x62\x06proto3'
+ ,
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_PARTITIONID = _descriptor.Descriptor(
+ name='PartitionId',
+ full_name='google.datastore.v1beta3.PartitionId',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='project_id', full_name='google.datastore.v1beta3.PartitionId.project_id', index=0,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='namespace_id', full_name='google.datastore.v1beta3.PartitionId.namespace_id', index=1,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=186,
+ serialized_end=241,
+)
+
+
+_KEY_PATHELEMENT = _descriptor.Descriptor(
+ name='PathElement',
+ full_name='google.datastore.v1beta3.Key.PathElement',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='kind', full_name='google.datastore.v1beta3.Key.PathElement.kind', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='id', full_name='google.datastore.v1beta3.Key.PathElement.id', index=1,
+ number=2, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.datastore.v1beta3.Key.PathElement.name', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='id_type', full_name='google.datastore.v1beta3.Key.PathElement.id_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=369,
+ serialized_end=437,
+)
+
+_KEY = _descriptor.Descriptor(
+ name='Key',
+ full_name='google.datastore.v1beta3.Key',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='partition_id', full_name='google.datastore.v1beta3.Key.partition_id', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='path', full_name='google.datastore.v1beta3.Key.path', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_KEY_PATHELEMENT, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=244,
+ serialized_end=437,
+)
+
+
+_ARRAYVALUE = _descriptor.Descriptor(
+ name='ArrayValue',
+ full_name='google.datastore.v1beta3.ArrayValue',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='values', full_name='google.datastore.v1beta3.ArrayValue.values', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=439,
+ serialized_end=500,
+)
+
+
+_VALUE = _descriptor.Descriptor(
+ name='Value',
+ full_name='google.datastore.v1beta3.Value',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='null_value', full_name='google.datastore.v1beta3.Value.null_value', index=0,
+ number=11, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='boolean_value', full_name='google.datastore.v1beta3.Value.boolean_value', index=1,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='integer_value', full_name='google.datastore.v1beta3.Value.integer_value', index=2,
+ number=2, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='double_value', full_name='google.datastore.v1beta3.Value.double_value', index=3,
+ number=3, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='timestamp_value', full_name='google.datastore.v1beta3.Value.timestamp_value', index=4,
+ number=10, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='key_value', full_name='google.datastore.v1beta3.Value.key_value', index=5,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='string_value', full_name='google.datastore.v1beta3.Value.string_value', index=6,
+ number=17, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='blob_value', full_name='google.datastore.v1beta3.Value.blob_value', index=7,
+ number=18, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='geo_point_value', full_name='google.datastore.v1beta3.Value.geo_point_value', index=8,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='entity_value', full_name='google.datastore.v1beta3.Value.entity_value', index=9,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='array_value', full_name='google.datastore.v1beta3.Value.array_value', index=10,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='meaning', full_name='google.datastore.v1beta3.Value.meaning', index=11,
+ number=14, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='exclude_from_indexes', full_name='google.datastore.v1beta3.Value.exclude_from_indexes', index=12,
+ number=19, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='value_type', full_name='google.datastore.v1beta3.Value.value_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=503,
+ serialized_end=1015,
+)
+
+
+_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor(
+ name='PropertiesEntry',
+ full_name='google.datastore.v1beta3.Entity.PropertiesEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.value', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'),
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1142,
+ serialized_end=1224,
+)
+
+_ENTITY = _descriptor.Descriptor(
+ name='Entity',
+ full_name='google.datastore.v1beta3.Entity',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='google.datastore.v1beta3.Entity.key', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='properties', full_name='google.datastore.v1beta3.Entity.properties', index=1,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_ENTITY_PROPERTIESENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1018,
+ serialized_end=1224,
+)
+
+_KEY_PATHELEMENT.containing_type = _KEY
+_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append(
+ _KEY_PATHELEMENT.fields_by_name['id'])
+_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type']
+_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append(
+ _KEY_PATHELEMENT.fields_by_name['name'])
+_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type']
+_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID
+_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT
+_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE
+_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE
+_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_VALUE.fields_by_name['key_value'].message_type = _KEY
+_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG
+_VALUE.fields_by_name['entity_value'].message_type = _ENTITY
+_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['null_value'])
+_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['boolean_value'])
+_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['integer_value'])
+_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['double_value'])
+_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['timestamp_value'])
+_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['key_value'])
+_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['string_value'])
+_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['blob_value'])
+_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['geo_point_value'])
+_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['entity_value'])
+_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_VALUE.oneofs_by_name['value_type'].fields.append(
+ _VALUE.fields_by_name['array_value'])
+_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
+_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE
+_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY
+_ENTITY.fields_by_name['key'].message_type = _KEY
+_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY
+DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID
+DESCRIPTOR.message_types_by_name['Key'] = _KEY
+DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE
+DESCRIPTOR.message_types_by_name['Value'] = _VALUE
+DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY
+
+PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict(
+ DESCRIPTOR = _PARTITIONID,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PartitionId)
+ ))
+_sym_db.RegisterMessage(PartitionId)
+
+Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict(
+
+ PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict(
+ DESCRIPTOR = _KEY_PATHELEMENT,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key.PathElement)
+ ))
+ ,
+ DESCRIPTOR = _KEY,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key)
+ ))
+_sym_db.RegisterMessage(Key)
+_sym_db.RegisterMessage(Key.PathElement)
+
+ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict(
+ DESCRIPTOR = _ARRAYVALUE,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ArrayValue)
+ ))
+_sym_db.RegisterMessage(ArrayValue)
+
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
+ DESCRIPTOR = _VALUE,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Value)
+ ))
+_sym_db.RegisterMessage(Value)
+
+Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
+
+ PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict(
+ DESCRIPTOR = _ENTITY_PROPERTIESENTRY,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity.PropertiesEntry)
+ ))
+ ,
+ DESCRIPTOR = _ENTITY,
+ __module__ = 'google.datastore.v1beta3.entity_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity)
+ ))
+_sym_db.RegisterMessage(Entity)
+_sym_db.RegisterMessage(Entity.PropertiesEntry)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\013EntityProtoP\001')
+_ENTITY_PROPERTIESENTRY.has_options = True
+_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001')
+# @@protoc_insertion_point(module_scope)
diff --git a/gcloud/datastore/_generated/query_pb2.py b/gcloud/datastore/_generated/query_pb2.py
index b3427c33525c..e843253850be 100644
--- a/gcloud/datastore/_generated/query_pb2.py
+++ b/gcloud/datastore/_generated/query_pb2.py
@@ -1,33 +1,917 @@
-# Copyright 2015 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Datastore shim to emulate v1beta3 module structure.
-
-This module intended to pair with query.proto.
-"""
-
-from gcloud.datastore import _datastore_v1_pb2
-
-
-EntityResult = _datastore_v1_pb2.EntityResult
-Query = _datastore_v1_pb2.Query
-KindExpression = _datastore_v1_pb2.KindExpression
-PropertyReference = _datastore_v1_pb2.PropertyReference
-PropertyOrder = _datastore_v1_pb2.PropertyOrder
-Filter = _datastore_v1_pb2.Filter
-CompositeFilter = _datastore_v1_pb2.CompositeFilter
-PropertyFilter = _datastore_v1_pb2.PropertyFilter
-GqlQuery = _datastore_v1_pb2.GqlQuery
-GqlQueryArg = _datastore_v1_pb2.GqlQueryArg
-QueryResultBatch = _datastore_v1_pb2.QueryResultBatch
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/datastore/v1beta3/query.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='google/datastore/v1beta3/query.proto',
+ package='google.datastore.v1beta3',
+ syntax='proto3',
+  serialized_pb=b'\n$google/datastore/v1beta3/query.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xa3\x01\n\x0c\x45ntityResult\x12\x30\n\x06\x65ntity\x18\x01 \x01(\x0b\x32 .google.datastore.v1beta3.Entity\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\x8b\x03\n\x05Query\x12\x38\n\nprojection\x18\x02 \x03(\x0b\x32$.google.datastore.v1beta3.Projection\x12\x36\n\x04kind\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.KindExpression\x12\x30\n\x06\x66ilter\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.Filter\x12\x36\n\x05order\x18\x05 \x03(\x0b\x32\'.google.datastore.v1beta3.PropertyOrder\x12@\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"K\n\nProjection\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\"\xdb\x01\n\rPropertyOrder\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x44\n\tdirection\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\xa3\x01\n\x06\x46ilter\x12\x45\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32).google.datastore.v1beta3.CompositeFilterH\x00\x12\x43\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32(.google.datastore.v1beta3.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xb3\x01\n\x0f\x43ompositeFilter\x12>\n\x02op\x18\x01 \x01(\x0e\x32\x32.google.datastore.v1beta3.CompositeFilter.Operator\x12\x31\n\x07\x66ilters\x18\x02 \x03(\x0b\x32 .google.datastore.v1beta3.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xd6\x02\n\x0ePropertyFilter\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12=\n\x02op\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xb4\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12M\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x35.google.datastore.v1beta3.GqlQuery.NamedBindingsEntry\x12H\n\x13positional_bindings\x18\x04 \x03(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter\x1a\x61\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12:\n\x05value\x18\x02 \x01(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter:\x02\x38\x01\"i\n\x11GqlQueryParameter\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xd3\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12M\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32\x31.google.datastore.v1beta3.EntityResult.ResultType\x12>\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12P\n\x0cmore_results\x18\x05 \x01(\x0e\x32:.google.datastore.v1beta3.QueryResultBatch.MoreResultsType\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42,\n\x1c\x63om.google.datastore.v1beta3B\nQueryProtoP\x01\x62\x06proto3'
+ ,
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor(
+ name='ResultType',
+ full_name='google.datastore.v1beta3.EntityResult.ResultType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='RESULT_TYPE_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FULL', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PROJECTION', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='KEY_ONLY', index=3, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=250,
+ serialized_end=331,
+)
+_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE)
+
+_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor(
+ name='Direction',
+ full_name='google.datastore.v1beta3.PropertyOrder.Direction',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='DIRECTION_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ASCENDING', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DESCENDING', index=2, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1026,
+ serialized_end=1095,
+)
+_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION)
+
+_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
+ name='Operator',
+ full_name='google.datastore.v1beta3.CompositeFilter.Operator',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='OPERATOR_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='AND', index=1, number=1,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1398,
+ serialized_end=1443,
+)
+_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR)
+
+_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor(
+ name='Operator',
+ full_name='google.datastore.v1beta3.PropertyFilter.Operator',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='OPERATOR_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='LESS_THAN', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='LESS_THAN_OR_EQUAL', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='GREATER_THAN', index=3, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='GREATER_THAN_OR_EQUAL', index=4, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EQUAL', index=5, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='HAS_ANCESTOR', index=6, number=11,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1639,
+ serialized_end=1788,
+)
+_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR)
+
+_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor(
+ name='MoreResultsType',
+ full_name='google.datastore.v1beta3.QueryResultBatch.MoreResultsType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='NOT_FINISHED', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='NO_MORE_RESULTS', index=4, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=2524,
+ serialized_end=2676,
+)
+_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE)
+
+
+_ENTITYRESULT = _descriptor.Descriptor(
+ name='EntityResult',
+ full_name='google.datastore.v1beta3.EntityResult',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='entity', full_name='google.datastore.v1beta3.EntityResult.entity', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='cursor', full_name='google.datastore.v1beta3.EntityResult.cursor', index=1,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _ENTITYRESULT_RESULTTYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=168,
+ serialized_end=331,
+)
+
+
+_QUERY = _descriptor.Descriptor(
+ name='Query',
+ full_name='google.datastore.v1beta3.Query',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='projection', full_name='google.datastore.v1beta3.Query.projection', index=0,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='kind', full_name='google.datastore.v1beta3.Query.kind', index=1,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='filter', full_name='google.datastore.v1beta3.Query.filter', index=2,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='order', full_name='google.datastore.v1beta3.Query.order', index=3,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='distinct_on', full_name='google.datastore.v1beta3.Query.distinct_on', index=4,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='start_cursor', full_name='google.datastore.v1beta3.Query.start_cursor', index=5,
+ number=7, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='end_cursor', full_name='google.datastore.v1beta3.Query.end_cursor', index=6,
+ number=8, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='offset', full_name='google.datastore.v1beta3.Query.offset', index=7,
+ number=10, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit', full_name='google.datastore.v1beta3.Query.limit', index=8,
+ number=12, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=334,
+ serialized_end=729,
+)
+
+
+_KINDEXPRESSION = _descriptor.Descriptor(
+ name='KindExpression',
+ full_name='google.datastore.v1beta3.KindExpression',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.datastore.v1beta3.KindExpression.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=731,
+ serialized_end=761,
+)
+
+
+_PROPERTYREFERENCE = _descriptor.Descriptor(
+ name='PropertyReference',
+ full_name='google.datastore.v1beta3.PropertyReference',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.datastore.v1beta3.PropertyReference.name', index=0,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=763,
+ serialized_end=796,
+)
+
+
+_PROJECTION = _descriptor.Descriptor(
+ name='Projection',
+ full_name='google.datastore.v1beta3.Projection',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='property', full_name='google.datastore.v1beta3.Projection.property', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=798,
+ serialized_end=873,
+)
+
+
+_PROPERTYORDER = _descriptor.Descriptor(
+ name='PropertyOrder',
+ full_name='google.datastore.v1beta3.PropertyOrder',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='property', full_name='google.datastore.v1beta3.PropertyOrder.property', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='direction', full_name='google.datastore.v1beta3.PropertyOrder.direction', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _PROPERTYORDER_DIRECTION,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=876,
+ serialized_end=1095,
+)
+
+
+_FILTER = _descriptor.Descriptor(
+ name='Filter',
+ full_name='google.datastore.v1beta3.Filter',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='composite_filter', full_name='google.datastore.v1beta3.Filter.composite_filter', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='property_filter', full_name='google.datastore.v1beta3.Filter.property_filter', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='filter_type', full_name='google.datastore.v1beta3.Filter.filter_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=1098,
+ serialized_end=1261,
+)
+
+
+_COMPOSITEFILTER = _descriptor.Descriptor(
+ name='CompositeFilter',
+ full_name='google.datastore.v1beta3.CompositeFilter',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='op', full_name='google.datastore.v1beta3.CompositeFilter.op', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='filters', full_name='google.datastore.v1beta3.CompositeFilter.filters', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _COMPOSITEFILTER_OPERATOR,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1264,
+ serialized_end=1443,
+)
+
+
+_PROPERTYFILTER = _descriptor.Descriptor(
+ name='PropertyFilter',
+ full_name='google.datastore.v1beta3.PropertyFilter',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='property', full_name='google.datastore.v1beta3.PropertyFilter.property', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='op', full_name='google.datastore.v1beta3.PropertyFilter.op', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.datastore.v1beta3.PropertyFilter.value', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _PROPERTYFILTER_OPERATOR,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1446,
+ serialized_end=1788,
+)
+
+
+_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor(
+ name='NamedBindingsEntry',
+ full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.key', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.value', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'),
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2002,
+ serialized_end=2099,
+)
+
+_GQLQUERY = _descriptor.Descriptor(
+ name='GqlQuery',
+ full_name='google.datastore.v1beta3.GqlQuery',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='query_string', full_name='google.datastore.v1beta3.GqlQuery.query_string', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='allow_literals', full_name='google.datastore.v1beta3.GqlQuery.allow_literals', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='named_bindings', full_name='google.datastore.v1beta3.GqlQuery.named_bindings', index=2,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='positional_bindings', full_name='google.datastore.v1beta3.GqlQuery.positional_bindings', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1791,
+ serialized_end=2099,
+)
+
+
+_GQLQUERYPARAMETER = _descriptor.Descriptor(
+ name='GqlQueryParameter',
+ full_name='google.datastore.v1beta3.GqlQueryParameter',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.datastore.v1beta3.GqlQueryParameter.value', index=0,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='cursor', full_name='google.datastore.v1beta3.GqlQueryParameter.cursor', index=1,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ _descriptor.OneofDescriptor(
+ name='parameter_type', full_name='google.datastore.v1beta3.GqlQueryParameter.parameter_type',
+ index=0, containing_type=None, fields=[]),
+ ],
+ serialized_start=2101,
+ serialized_end=2206,
+)
+
+
+_QUERYRESULTBATCH = _descriptor.Descriptor(
+ name='QueryResultBatch',
+ full_name='google.datastore.v1beta3.QueryResultBatch',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='skipped_results', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_results', index=0,
+ number=6, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='skipped_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_cursor', index=1,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='entity_result_type', full_name='google.datastore.v1beta3.QueryResultBatch.entity_result_type', index=2,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='entity_results', full_name='google.datastore.v1beta3.QueryResultBatch.entity_results', index=3,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='end_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.end_cursor', index=4,
+ number=4, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='more_results', full_name='google.datastore.v1beta3.QueryResultBatch.more_results', index=5,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _QUERYRESULTBATCH_MORERESULTSTYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2209,
+ serialized_end=2676,
+)
+
+_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
+_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT
+_QUERY.fields_by_name['projection'].message_type = _PROJECTION
+_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION
+_QUERY.fields_by_name['filter'].message_type = _FILTER
+_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER
+_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE
+_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
+_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE
+_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
+_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION
+_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER
+_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER
+_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER
+_FILTER.oneofs_by_name['filter_type'].fields.append(
+ _FILTER.fields_by_name['composite_filter'])
+_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type']
+_FILTER.oneofs_by_name['filter_type'].fields.append(
+ _FILTER.fields_by_name['property_filter'])
+_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type']
+_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR
+_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER
+_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER
+_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
+_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR
+_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE
+_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER
+_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER
+_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY
+_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY
+_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER
+_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE
+_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append(
+ _GQLQUERYPARAMETER.fields_by_name['value'])
+_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type']
+_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append(
+ _GQLQUERYPARAMETER.fields_by_name['cursor'])
+_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type']
+_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE
+_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT
+_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE
+_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH
+DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT
+DESCRIPTOR.message_types_by_name['Query'] = _QUERY
+DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION
+DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE
+DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION
+DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER
+DESCRIPTOR.message_types_by_name['Filter'] = _FILTER
+DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER
+DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER
+DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY
+DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER
+DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH
+
+EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict(
+ DESCRIPTOR = _ENTITYRESULT,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.EntityResult)
+ ))
+_sym_db.RegisterMessage(EntityResult)
+
+Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict(
+ DESCRIPTOR = _QUERY,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Query)
+ ))
+_sym_db.RegisterMessage(Query)
+
+KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict(
+ DESCRIPTOR = _KINDEXPRESSION,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.KindExpression)
+ ))
+_sym_db.RegisterMessage(KindExpression)
+
+PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYREFERENCE,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyReference)
+ ))
+_sym_db.RegisterMessage(PropertyReference)
+
+Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict(
+ DESCRIPTOR = _PROJECTION,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Projection)
+ ))
+_sym_db.RegisterMessage(Projection)
+
+PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYORDER,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyOrder)
+ ))
+_sym_db.RegisterMessage(PropertyOrder)
+
+Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict(
+ DESCRIPTOR = _FILTER,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Filter)
+ ))
+_sym_db.RegisterMessage(Filter)
+
+CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict(
+ DESCRIPTOR = _COMPOSITEFILTER,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CompositeFilter)
+ ))
+_sym_db.RegisterMessage(CompositeFilter)
+
+PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYFILTER,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyFilter)
+ ))
+_sym_db.RegisterMessage(PropertyFilter)
+
+GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict(
+
+ NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict(
+ DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery.NamedBindingsEntry)
+ ))
+ ,
+ DESCRIPTOR = _GQLQUERY,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery)
+ ))
+_sym_db.RegisterMessage(GqlQuery)
+_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry)
+
+GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict(
+ DESCRIPTOR = _GQLQUERYPARAMETER,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQueryParameter)
+ ))
+_sym_db.RegisterMessage(GqlQueryParameter)
+
+QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict(
+ DESCRIPTOR = _QUERYRESULTBATCH,
+ __module__ = 'google.datastore.v1beta3.query_pb2'
+ # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.QueryResultBatch)
+ ))
+_sym_db.RegisterMessage(QueryResultBatch)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\nQueryProtoP\001')
+_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True
+_GQLQUERY_NAMEDBINDINGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001')
+# @@protoc_insertion_point(module_scope)
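
The regenerated ``query_pb2`` module registers the v1beta3 query messages shown above; ``GqlQuery.named_bindings`` is now a protobuf map (see the map-entry option set on ``NamedBindingsEntry``). A minimal usage sketch, not part of the generated file, with an illustrative query string and binding name:

    from gcloud.datastore._generated import query_pb2

    gql = query_pb2.GqlQuery(
        query_string='SELECT * FROM Task WHERE done = @done',
        allow_literals=False,
    )
    # Map entries are created on first access rather than with add().
    gql.named_bindings['done'].value.boolean_value = False
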
diff --git a/gcloud/datastore/batch.py b/gcloud/datastore/batch.py
index b57a9a5b2599..5d4fc01b1442 100644
--- a/gcloud/datastore/batch.py
+++ b/gcloud/datastore/batch.py
@@ -22,7 +22,6 @@
"""
from gcloud.datastore import helpers
-from gcloud.datastore.key import _projects_equal
from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
@@ -120,7 +119,8 @@ def _add_partial_key_entity_pb(self):
:returns: The newly created entity protobuf that will be
updated and sent with a commit.
"""
- return self.mutations.insert_auto_id.add()
+ new_mutation = self.mutations.add()
+ return new_mutation.insert
def _add_complete_key_entity_pb(self):
"""Adds a new mutation for an entity with a completed key.
@@ -132,7 +132,8 @@ def _add_complete_key_entity_pb(self):
# We use ``upsert`` for entities with completed keys, rather than
# ``insert`` or ``update``, in order not to create race conditions
# based on prior existence / removal of the entity.
- return self.mutations.upsert.add()
+ new_mutation = self.mutations.add()
+ return new_mutation.upsert
def _add_delete_key_pb(self):
"""Adds a new mutation for a key to be deleted.
@@ -141,7 +142,8 @@ def _add_delete_key_pb(self):
:returns: The newly created key protobuf that will be
deleted when sent with a commit.
"""
- return self.mutations.delete.add()
+ new_mutation = self.mutations.add()
+ return new_mutation.delete
@property
def mutations(self):
@@ -153,10 +155,11 @@ def mutations(self):
adding a new mutation. This getter returns the protobuf that has been
built-up so far.
- :rtype: :class:`gcloud.datastore._generated.datastore_pb2.Mutation`
- :returns: The Mutation protobuf to be sent in the commit request.
+ :rtype: iterable
+ :returns: The list of :class:`._generated.datastore_pb2.Mutation`
+ protobufs to be sent in the commit request.
"""
- return self._commit_request.mutation
+ return self._commit_request.mutations
def put(self, entity):
"""Remember an entity's state to be saved during :meth:`commit`.
@@ -173,7 +176,7 @@ def put(self, entity):
"bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
When an entity has a partial key, calling :meth:`commit` sends it as
- an ``insert_auto_id`` mutation and the key is completed. On return,
+ an ``insert`` mutation and the key is completed. On return,
the key for the ``entity`` passed in is updated to match the key ID
assigned by the server.
@@ -186,7 +189,7 @@ def put(self, entity):
if entity.key is None:
raise ValueError("Entity must have a key")
- if not _projects_equal(self.project, entity.key.project):
+ if self.project != entity.key.project:
raise ValueError("Key must be from same project as batch")
if entity.key.is_partial:
@@ -209,10 +212,10 @@ def delete(self, key):
if key.is_partial:
raise ValueError("Key must be complete")
- if not _projects_equal(self.project, key.project):
+ if self.project != key.project:
raise ValueError("Key must be from same project as batch")
- key_pb = helpers._prepare_key_for_request(key.to_protobuf())
+ key_pb = key.to_protobuf()
self._add_delete_key_pb().CopyFrom(key_pb)
def begin(self):
@@ -243,7 +246,7 @@ def _commit(self):
# order) directly ``_partial_key_entities``.
for new_key_pb, entity in zip(updated_keys,
self._partial_key_entities):
- new_id = new_key_pb.path_element[-1].id
+ new_id = new_key_pb.path[-1].id
entity.key = entity.key.completed_key(new_id)
def commit(self):
@@ -294,6 +297,5 @@ def _assign_entity_to_pb(entity_pb, entity):
:param entity: The entity being updated within the batch / transaction.
"""
bare_entity_pb = helpers.entity_to_protobuf(entity)
- key_pb = helpers._prepare_key_for_request(bare_entity_pb.key)
- bare_entity_pb.key.CopyFrom(key_pb)
+ bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
entity_pb.CopyFrom(bare_entity_pb)
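
The batch changes above replace the old grouped ``Mutation`` message (with its repeated ``insert_auto_id`` / ``upsert`` / ``delete`` fields) by a repeated ``mutations`` field whose entries each carry a single operation in the ``operation`` oneof. A minimal sketch of the new shape, using a placeholder project and kind:

    from gcloud.datastore._generated import datastore_pb2

    commit_request = datastore_pb2.CommitRequest()

    # One Mutation per change; setting ``upsert`` (or ``insert`` /
    # ``delete``) selects the member of the ``operation`` oneof.
    mutation = commit_request.mutations.add()
    mutation.upsert.key.partition_id.project_id = 'my-project'
    path_element = mutation.upsert.key.path.add()
    path_element.kind = 'Task'
    path_element.name = 'sample-task'

    assert mutation.WhichOneof('operation') == 'upsert'
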
diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py
index e4e066966445..1e9b40d6a8bf 100644
--- a/gcloud/datastore/client.py
+++ b/gcloud/datastore/client.py
@@ -16,19 +16,16 @@
import os
from gcloud._helpers import _LocalStack
-from gcloud._helpers import _app_engine_id
-from gcloud._helpers import _compute_engine_id
+from gcloud._helpers import _determine_default_project as _base_default_project
from gcloud.client import _ClientProjectMixin
from gcloud.client import Client as _BaseClient
from gcloud.datastore import helpers
from gcloud.datastore.connection import Connection
from gcloud.datastore.batch import Batch
from gcloud.datastore.entity import Entity
-from gcloud.datastore.key import _projects_equal
from gcloud.datastore.key import Key
from gcloud.datastore.query import Query
from gcloud.datastore.transaction import Transaction
-from gcloud.environment_vars import DATASET
from gcloud.environment_vars import GCD_DATASET
@@ -36,11 +33,6 @@
"""Maximum number of iterations to wait for deferred keys."""
-def _get_production_project():
- """Gets the production application ID if it can be inferred."""
- return os.getenv(DATASET)
-
-
def _get_gcd_project():
"""Gets the GCD application ID if it can be inferred."""
return os.getenv(GCD_DATASET)
@@ -52,8 +44,8 @@ def _determine_default_project(project=None):
In implicit case, supports four environments. In order of precedence, the
implicit environments are:
- * GCLOUD_DATASET_ID environment variable
- * DATASTORE_DATASET environment variable (for ``gcd`` testing)
+ * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing)
+ * GCLOUD_PROJECT environment variable
* Google App Engine application ID
* Google Compute Engine project ID (from metadata server)
@@ -63,17 +55,11 @@ def _determine_default_project(project=None):
:rtype: string or ``NoneType``
:returns: Default project if it can be determined.
"""
- if project is None:
- project = _get_production_project()
-
if project is None:
project = _get_gcd_project()
if project is None:
- project = _app_engine_id()
-
- if project is None:
- project = _compute_engine_id()
+ project = _base_default_project(project=project)
return project
@@ -291,7 +277,7 @@ def get_multi(self, keys, missing=None, deferred=None):
ids = set(key.project for key in keys)
for current_id in ids:
- if not _projects_equal(current_id, self.project):
+ if current_id != self.project:
raise ValueError('Keys do not match project')
transaction = self.current_transaction
@@ -418,7 +404,7 @@ def allocate_ids(self, incomplete_key, num_ids):
conn = self.connection
allocated_key_pbs = conn.allocate_ids(incomplete_key.project,
incomplete_key_pbs)
- allocated_ids = [allocated_key_pb.path_element[-1].id
+ allocated_ids = [allocated_key_pb.path[-1].id
for allocated_key_pb in allocated_key_pbs]
return [incomplete_key.completed_key(allocated_id)
for allocated_id in allocated_ids]
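
With ``_get_production_project`` removed, the client resolves a project in this order: an explicit argument, then ``DATASTORE_DATASET`` (the ``gcd`` / emulator variable), then the shared fallback in ``gcloud._helpers``. A hedged illustration, using made-up project ids:

    import os

    from gcloud.datastore import client as datastore_client

    # The emulator variable takes precedence over the generic fallback.
    os.environ['DATASTORE_DATASET'] = 'emulator-project'  # hypothetical value
    assert datastore_client._determine_default_project() == 'emulator-project'

    # An explicit project always wins and skips the environment entirely.
    assert datastore_client._determine_default_project('my-project') == 'my-project'
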
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index 5ba3bd4c9294..0810d3303b26 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -20,7 +20,7 @@
from gcloud.environment_vars import GCD_HOST
from gcloud.exceptions import make_exception
from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
-from gcloud.datastore._generated import entity_pb2 as _entity_pb2
+from google.rpc import status_pb2
class Connection(connection.Connection):
@@ -40,18 +40,17 @@ class Connection(connection.Connection):
:attr:`API_BASE_URL`.
"""
- API_BASE_URL = 'https://www.googleapis.com'
+ API_BASE_URL = 'https://datastore.googleapis.com'
"""The base of the API call URL."""
- API_VERSION = 'v1beta2'
+ API_VERSION = 'v1beta3'
"""The version of the API, used in building the API call's URL."""
- API_URL_TEMPLATE = ('{api_base}/datastore/{api_version}'
- '/datasets/{project}/{method}')
+ API_URL_TEMPLATE = ('{api_base}/{api_version}/projects'
+ '/{project}:{method}')
"""A template for the URL of a particular API call."""
- SCOPE = ('https://www.googleapis.com/auth/datastore',
- 'https://www.googleapis.com/auth/userinfo.email')
+ SCOPE = ('https://www.googleapis.com/auth/datastore',)
"""The scopes required for authenticating as a Cloud Datastore consumer."""
def __init__(self, credentials=None, http=None, api_base_url=None):
@@ -91,7 +90,8 @@ def _request(self, project, method, data):
status = headers['status']
if status != '200':
- raise make_exception(headers, content, use_json=False)
+ error_status = status_pb2.Status.FromString(content)
+ raise make_exception(headers, error_status.message, use_json=False)
return content
@@ -193,7 +193,7 @@ def lookup(self, project, key_pbs,
"""
lookup_request = _datastore_pb2.LookupRequest()
_set_read_options(lookup_request, eventual, transaction_id)
- _add_keys_to_request(lookup_request.key, key_pbs)
+ _add_keys_to_request(lookup_request.keys, key_pbs)
lookup_response = self._rpc(project, 'lookup', lookup_request,
_datastore_pb2.LookupResponse)
@@ -264,13 +264,13 @@ def run_query(self, project, query_pb, namespace=None,
_set_read_options(request, eventual, transaction_id)
if namespace:
- request.partition_id.namespace = namespace
+ request.partition_id.namespace_id = namespace
request.query.CopyFrom(query_pb)
response = self._rpc(project, 'runQuery', request,
_datastore_pb2.RunQueryResponse)
return (
- [e.entity for e in response.batch.entity_result],
+ [e.entity for e in response.batch.entity_results],
response.batch.end_cursor, # Assume response always has cursor.
response.batch.more_results,
response.batch.skipped_results,
@@ -288,8 +288,6 @@ def begin_transaction(self, project):
:returns: The serialized transaction that was begun.
"""
request = _datastore_pb2.BeginTransactionRequest()
- request.isolation_level = (
- _datastore_pb2.BeginTransactionRequest.SERIALIZABLE)
response = self._rpc(project, 'beginTransaction', request,
_datastore_pb2.BeginTransactionResponse)
return response.transaction
@@ -363,11 +361,11 @@ def allocate_ids(self, project, key_pbs):
:returns: An equal number of keys, with IDs filled in by the backend.
"""
request = _datastore_pb2.AllocateIdsRequest()
- _add_keys_to_request(request.key, key_pbs)
+ _add_keys_to_request(request.keys, key_pbs)
# Nothing to do with this response, so just execute the method.
response = self._rpc(project, 'allocateIds', request,
_datastore_pb2.AllocateIdsResponse)
- return list(response.key)
+ return list(response.keys)
def _set_read_options(request, eventual, transaction_id):
@@ -388,28 +386,6 @@ def _set_read_options(request, eventual, transaction_id):
opts.transaction = transaction_id
-def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers
- """Add protobuf keys to a request object.
-
- .. note::
- This is copied from `helpers` to avoid a cycle:
- _implicit_environ -> connection -> helpers -> key -> _implicit_environ
-
- :type key_pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
- :param key_pb: A key to be added to a request.
-
- :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
- :returns: A key which will be added to a request. It will be the
- original if nothing needs to be changed.
- """
- if key_pb.partition_id.dataset_id: # Simple field (string)
- new_key_pb = _entity_pb2.Key()
- new_key_pb.CopyFrom(key_pb)
- new_key_pb.partition_id.ClearField('dataset_id')
- key_pb = new_key_pb
- return key_pb
-
-
def _add_keys_to_request(request_field_pb, key_pbs):
"""Add protobuf keys to a request object.
@@ -420,7 +396,6 @@ def _add_keys_to_request(request_field_pb, key_pbs):
:param key_pbs: The keys to add to a request.
"""
for key_pb in key_pbs:
- key_pb = _prepare_key_for_request(key_pb)
request_field_pb.add().CopyFrom(key_pb)
@@ -435,7 +410,8 @@ def _parse_commit_response(commit_response_pb):
:class:`._generated.entity_pb2.Key` for each incomplete key
that was completed in the commit.
"""
- mut_result = commit_response_pb.mutation_result
- index_updates = mut_result.index_updates
- completed_keys = list(mut_result.insert_auto_id_key)
+ mut_results = commit_response_pb.mutation_results
+ index_updates = commit_response_pb.index_updates
+ completed_keys = [mut_result.key for mut_result in mut_results
+ if mut_result.HasField('key')] # Message field (Key)
return index_updates, completed_keys
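
Two behavioral changes in ``connection.py`` are worth noting: request URLs now take the ``projects/{project}:{method}`` form against ``datastore.googleapis.com``, and error bodies are decoded as a serialized ``google.rpc.Status`` instead of plain text. A small sketch of the URL shape (project and method names are placeholders):

    API_URL_TEMPLATE = ('{api_base}/{api_version}/projects'
                        '/{project}:{method}')

    url = API_URL_TEMPLATE.format(
        api_base='https://datastore.googleapis.com',
        api_version='v1beta3',
        project='my-project',
        method='runQuery',
    )
    # -> 'https://datastore.googleapis.com/v1beta3/projects/my-project:runQuery'
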
diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py
index b9b8863dabcc..9cb73011b261 100644
--- a/gcloud/datastore/helpers.py
+++ b/gcloud/datastore/helpers.py
@@ -19,10 +19,12 @@
import datetime
+from google.protobuf import struct_pb2
+from google.type import latlng_pb2
import six
-from gcloud._helpers import _datetime_from_microseconds
-from gcloud._helpers import _microseconds_from_datetime
+from gcloud._helpers import _datetime_to_pb_timestamp
+from gcloud._helpers import _pb_timestamp_to_datetime
from gcloud.datastore._generated import entity_pb2 as _entity_pb2
from gcloud.datastore.entity import Entity
from gcloud.datastore.key import Key
@@ -30,46 +32,6 @@
__all__ = ('entity_from_protobuf', 'key_from_protobuf')
-def find_true_project(project, connection):
- """Find the true (unaliased) project.
-
- If the given ID already has a 's~' or 'e~' prefix, does nothing.
- Otherwise, looks up a bogus Key('__MissingLookupKind', 1) and reads the
- true prefixed project from the response (either from found or from
- missing).
-
- For some context, see:
- github.com/GoogleCloudPlatform/gcloud-python/pull/528
- github.com/GoogleCloudPlatform/google-cloud-datastore/issues/59
-
- :type project: string
- :param project: The project to un-alias / prefix.
-
- :type connection: :class:`gcloud.datastore.connection.Connection`
- :param connection: A connection provided to connect to the project.
-
- :rtype: string
- :returns: The true / prefixed / un-aliased project.
- """
- if project.startswith('s~') or project.startswith('e~'):
- return project
-
- # Create the bogus Key protobuf to be looked up and remove
- # the project so the backend won't complain.
- bogus_key_pb = Key('__MissingLookupKind', 1,
- project=project).to_protobuf()
- bogus_key_pb.partition_id.ClearField('dataset_id')
-
- found_pbs, missing_pbs, _ = connection.lookup(project, [bogus_key_pb])
- # By not passing in `deferred`, lookup will continue until
- # all results are `found` or `missing`.
- all_pbs = missing_pbs + found_pbs
- # We only asked for one, so should only receive one.
- returned_pb, = all_pbs
-
- return returned_pb.key.partition_id.dataset_id
-
-
def _get_meaning(value_pb, is_list=False):
"""Get the meaning from a protobuf value.
@@ -92,20 +54,20 @@ def _get_meaning(value_pb, is_list=False):
if is_list:
# An empty list will have no values, hence no shared meaning
# set among them.
- if len(value_pb.list_value) == 0:
+ if len(value_pb.array_value.values) == 0:
return None
# We check among all the meanings, some of which may be None,
# the rest which may be enum/int values.
all_meanings = set(_get_meaning(sub_value_pb)
- for sub_value_pb in value_pb.list_value)
+ for sub_value_pb in value_pb.array_value.values)
meaning = all_meanings.pop()
# The value we popped off should have been unique. If not
# then we can't handle a list with values that have more
# than one meaning.
if all_meanings:
raise ValueError('Different meanings set on values '
- 'within a list_value')
+ 'within an array_value')
elif value_pb.meaning: # Simple field (int32)
meaning = value_pb.meaning
@@ -124,9 +86,7 @@ def _new_value_pb(entity_pb, name):
:rtype: :class:`gcloud.datastore._generated.entity_pb2.Value`
:returns: The new ``Value`` protobuf that was added to the entity.
"""
- property_pb = entity_pb.property.add()
- property_pb.name = name
- return property_pb.value
+ return entity_pb.properties.get_or_create(name)
def _property_tuples(entity_pb):
@@ -139,8 +99,7 @@ def _property_tuples(entity_pb):
:returns: An iterator that yields tuples of a name and ``Value``
corresponding to properties on the entity.
"""
- for property_pb in entity_pb.property:
- yield property_pb.name, property_pb.value
+ return six.iteritems(entity_pb.properties)
def entity_from_protobuf(pb):
@@ -173,19 +132,21 @@ def entity_from_protobuf(pb):
if meaning is not None:
entity_meanings[prop_name] = (meaning, value)
- # Check if ``value_pb`` was indexed. Lists need to be special-cased
- # and we require all ``indexed`` values in a list agree.
+ # Check if ``value_pb`` was excluded from index. Lists need to be
+ # special-cased and we require all ``exclude_from_indexes`` values
+ # in a list agree.
if is_list:
- indexed_values = set(value_pb.indexed
- for value_pb in value_pb.list_value)
- if len(indexed_values) != 1:
- raise ValueError('For a list_value, subvalues must either all '
- 'be indexed or all excluded from indexes.')
-
- if not indexed_values.pop():
+ exclude_values = set(value_pb.exclude_from_indexes
+ for value_pb in value_pb.array_value.values)
+ if len(exclude_values) != 1:
+ raise ValueError('For an array_value, subvalues must either '
+ 'all be indexed or all excluded from '
+ 'indexes.')
+
+ if exclude_values.pop():
exclude_from_indexes.append(prop_name)
else:
- if not value_pb.indexed:
+ if value_pb.exclude_from_indexes:
exclude_from_indexes.append(prop_name)
entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
@@ -220,10 +181,10 @@ def entity_to_protobuf(entity):
# Add index information to protobuf.
if name in entity.exclude_from_indexes:
if not value_is_list:
- value_pb.indexed = False
+ value_pb.exclude_from_indexes = True
- for sub_value in value_pb.list_value:
- sub_value.indexed = False
+ for sub_value in value_pb.array_value.values:
+ sub_value.exclude_from_indexes = True
# Add meaning information to protobuf.
if name in entity._meanings:
@@ -233,7 +194,7 @@ def entity_to_protobuf(entity):
if orig_value is value:
# For lists, we set meaning on each sub-element.
if value_is_list:
- for sub_value_pb in value_pb.list_value:
+ for sub_value_pb in value_pb.array_value.values:
sub_value_pb.meaning = meaning
else:
value_pb.meaning = meaning
@@ -254,7 +215,7 @@ def key_from_protobuf(pb):
:returns: a new `Key` instance
"""
path_args = []
- for element in pb.path_element:
+ for element in pb.path:
path_args.append(element.kind)
if element.id: # Simple field (int64)
path_args.append(element.id)
@@ -264,11 +225,11 @@ def key_from_protobuf(pb):
path_args.append(element.name)
project = None
- if pb.partition_id.dataset_id: # Simple field (string)
- project = pb.partition_id.dataset_id
+ if pb.partition_id.project_id: # Simple field (string)
+ project = pb.partition_id.project_id
namespace = None
- if pb.partition_id.namespace: # Simple field (string)
- namespace = pb.partition_id.namespace
+ if pb.partition_id.namespace_id: # Simple field (string)
+ namespace = pb.partition_id.namespace_id
return Key(*path_args, namespace=namespace, project=project)
@@ -306,8 +267,8 @@ def _pb_attr_value(val):
"""
if isinstance(val, datetime.datetime):
- name = 'timestamp_microseconds'
- value = _microseconds_from_datetime(val)
+ name = 'timestamp'
+ value = _datetime_to_pb_timestamp(val)
elif isinstance(val, Key):
name, value = 'key', val.to_protobuf()
elif isinstance(val, bool):
@@ -323,7 +284,11 @@ def _pb_attr_value(val):
elif isinstance(val, Entity):
name, value = 'entity', val
elif isinstance(val, list):
- name, value = 'list', val
+ name, value = 'array', val
+ elif isinstance(val, GeoPoint):
+ name, value = 'geo_point', val.to_protobuf()
+ elif val is None:
+ name, value = 'null', struct_pb2.NULL_VALUE
else:
raise ValueError("Unknown protobuf attr type %s" % type(val))
@@ -344,37 +309,48 @@ def _get_value_from_value_pb(value_pb):
:param value_pb: The Value Protobuf.
:returns: The value provided by the Protobuf.
+    :raises: :class:`ValueError` if no value type
+ has been set.
"""
- result = None
- # Simple field (int64)
- if value_pb.HasField('timestamp_microseconds_value'):
- microseconds = value_pb.timestamp_microseconds_value
- result = _datetime_from_microseconds(microseconds)
+ value_type = value_pb.WhichOneof('value_type')
+
+ if value_type == 'timestamp_value':
+ result = _pb_timestamp_to_datetime(value_pb.timestamp_value)
- elif value_pb.HasField('key_value'): # Message field (Key)
+ elif value_type == 'key_value':
result = key_from_protobuf(value_pb.key_value)
- elif value_pb.HasField('boolean_value'): # Simple field (bool)
+ elif value_type == 'boolean_value':
result = value_pb.boolean_value
- elif value_pb.HasField('double_value'): # Simple field (double)
+ elif value_type == 'double_value':
result = value_pb.double_value
- elif value_pb.HasField('integer_value'): # Simple field (int64)
+ elif value_type == 'integer_value':
result = value_pb.integer_value
- elif value_pb.HasField('string_value'): # Simple field (string)
+ elif value_type == 'string_value':
result = value_pb.string_value
- elif value_pb.HasField('blob_value'): # Simple field (bytes)
+ elif value_type == 'blob_value':
result = value_pb.blob_value
- elif value_pb.HasField('entity_value'): # Message field (Entity)
+ elif value_type == 'entity_value':
result = entity_from_protobuf(value_pb.entity_value)
- elif value_pb.list_value:
+ elif value_type == 'array_value':
result = [_get_value_from_value_pb(value)
- for value in value_pb.list_value]
+ for value in value_pb.array_value.values]
+
+ elif value_type == 'geo_point_value':
+ result = GeoPoint(value_pb.geo_point_value.latitude,
+ value_pb.geo_point_value.longitude)
+
+ elif value_type == 'null_value':
+ result = None
+
+ else:
+ raise ValueError('Value protobuf did not have any value set')
return result
@@ -396,47 +372,64 @@ def _set_protobuf_value(value_pb, val):
:class:`gcloud.datastore.entity.Entity`
:param val: The value to be assigned.
"""
- if val is None:
- value_pb.Clear()
- return
-
attr, val = _pb_attr_value(val)
if attr == 'key_value':
value_pb.key_value.CopyFrom(val)
+ elif attr == 'timestamp_value':
+ value_pb.timestamp_value.CopyFrom(val)
elif attr == 'entity_value':
entity_pb = entity_to_protobuf(val)
value_pb.entity_value.CopyFrom(entity_pb)
- elif attr == 'list_value':
- l_pb = value_pb.list_value
+ elif attr == 'array_value':
+ l_pb = value_pb.array_value.values
for item in val:
i_pb = l_pb.add()
_set_protobuf_value(i_pb, item)
+ elif attr == 'geo_point_value':
+ value_pb.geo_point_value.CopyFrom(val)
else: # scalar, just assign
setattr(value_pb, attr, val)
-def _prepare_key_for_request(key_pb):
- """Add protobuf keys to a request object.
+class GeoPoint(object):
+ """Simple container for a geo point value.
- :type key_pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
- :param key_pb: A key to be added to a request.
+ :type latitude: float
+ :param latitude: Latitude of a point.
- :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
- :returns: A key which will be added to a request. It will be the
- original if nothing needs to be changed.
+ :type longitude: float
+ :param longitude: Longitude of a point.
"""
- if key_pb.partition_id.dataset_id: # Simple field (string)
- # We remove the dataset_id from the protobuf. This is because
- # the backend fails a request if the key contains un-prefixed
- # project. The backend fails because requests to
- # /datastore/.../datasets/foo/...
- # and
- # /datastore/.../datasets/s~foo/...
- # both go to the datastore given by 's~foo'. So if the key
- # protobuf in the request body has dataset_id='foo', the
- # backend will reject since 'foo' != 's~foo'.
- new_key_pb = _entity_pb2.Key()
- new_key_pb.CopyFrom(key_pb)
- new_key_pb.partition_id.ClearField('dataset_id')
- key_pb = new_key_pb
- return key_pb
+
+ def __init__(self, latitude, longitude):
+ self.latitude = latitude
+ self.longitude = longitude
+
+ def to_protobuf(self):
+ """Convert the current object to protobuf.
+
+ :rtype: :class:`google.type.latlng_pb2.LatLng`.
+ :returns: The current point as a protobuf.
+ """
+ return latlng_pb2.LatLng(latitude=self.latitude,
+ longitude=self.longitude)
+
+ def __eq__(self, other):
+ """Compare two geo points for equality.
+
+ :rtype: boolean
+ :returns: True if the points compare equal, else False.
+ """
+ if not isinstance(other, GeoPoint):
+ return False
+
+ return (self.latitude == other.latitude and
+ self.longitude == other.longitude)
+
+ def __ne__(self, other):
+ """Compare two geo points for inequality.
+
+ :rtype: boolean
+ :returns: False if the points compare equal, else True.
+ """
+ return not self.__eq__(other)
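
The new ``GeoPoint`` container round-trips through ``google.type.LatLng`` and supports value equality. A quick usage sketch with illustrative coordinates:

    from gcloud.datastore.helpers import GeoPoint

    point = GeoPoint(40.7128, -74.0060)   # latitude, longitude
    latlng_pb = point.to_protobuf()       # google.type.latlng_pb2.LatLng
    assert latlng_pb.latitude == point.latitude
    assert latlng_pb.longitude == point.longitude
    assert point == GeoPoint(40.7128, -74.0060)
    assert point != GeoPoint(0.0, 0.0)
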
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index 7a8e2848b7f2..a1356dca1481 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -86,7 +86,7 @@ def __eq__(self, other):
return False
return (self.flat_path == other.flat_path and
- _projects_equal(self.project, other.project) and
+ self.project == other.project and
self.namespace == other.namespace)
def __ne__(self, other):
@@ -240,13 +240,13 @@ def to_protobuf(self):
:returns: The protobuf representing the key.
"""
key = _entity_pb2.Key()
- key.partition_id.dataset_id = self.project
+ key.partition_id.project_id = self.project
if self.namespace:
- key.partition_id.namespace = self.namespace
+ key.partition_id.namespace_id = self.namespace
for item in self.path:
- element = key.path_element.add()
+ element = key.path.add()
if 'kind' in item:
element.kind = item['kind']
if 'id' in item:
@@ -402,55 +402,3 @@ def _validate_project(project, parent):
raise ValueError("A Key must have a project set.")
return project
-
-
-def _projects_equal(project1, project2):
- """Compares two projects for fuzzy equality.
-
- Each may be prefixed or unprefixed (but not null, since project
- is required on a key). The only allowed prefixes are 's~' and 'e~'.
-
- Two identical prefixed match
-
- >>> 's~foo' == 's~foo'
- >>> 'e~bar' == 'e~bar'
-
- while non-identical prefixed don't
-
- >>> 's~foo' != 's~bar'
- >>> 's~foo' != 'e~foo'
-
- As for non-prefixed, they can match other non-prefixed or
- prefixed:
-
- >>> 'foo' == 'foo'
- >>> 'foo' == 's~foo'
- >>> 'foo' == 'e~foo'
- >>> 'foo' != 'bar'
- >>> 'foo' != 's~bar'
-
- (Ties are resolved since 'foo' can only be an alias for one of
- s~foo or e~foo in the backend.)
-
- :type project1: string
- :param project1: A project.
-
- :type project2: string
- :param project2: A project.
-
- :rtype: bool
- :returns: Boolean indicating if the projects are the same.
- """
- if project1 == project2:
- return True
-
- if project1.startswith('s~') or project1.startswith('e~'):
- # If `project1` is prefixed and not matching, then the only way
- # they can match is if `project2` is unprefixed.
- return project1[2:] == project2
- elif project2.startswith('s~') or project2.startswith('e~'):
- # Here we know `project1` is unprefixed and `project2`
- # is prefixed.
- return project1 == project2[2:]
-
- return False
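
With the fuzzy ``_projects_equal`` helper gone, keys now compare projects by plain equality, and ``to_protobuf`` writes the renamed v1beta3 fields. A brief sketch using placeholder names:

    from gcloud.datastore.key import Key

    key = Key('Task', 1234, project='my-project', namespace='my-namespace')
    key_pb = key.to_protobuf()

    assert key_pb.partition_id.project_id == 'my-project'      # was dataset_id
    assert key_pb.partition_id.namespace_id == 'my-namespace'  # was namespace
    assert key_pb.path[0].kind == 'Task'                       # was path_element
    assert key_pb.path[0].id == 1234
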
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index 7939028dac75..15519a01f15b 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -56,8 +56,8 @@ class Query(object):
:param order: field names used to order query results. Prepend '-'
to a field name to sort it in descending order.
- :type group_by: sequence of string
- :param group_by: field names used to group query results.
+ :type distinct_on: sequence of string
+ :param distinct_on: field names used to group query results.
:raises: ValueError if ``project`` is not passed and no implicit
default is set.
@@ -81,7 +81,7 @@ def __init__(self,
filters=(),
projection=(),
order=(),
- group_by=()):
+ distinct_on=()):
self._client = client
self._kind = kind
@@ -94,7 +94,7 @@ def __init__(self,
self.add_filter(property_name, operator, value)
self._projection = _ensure_tuple_or_list('projection', projection)
self._order = _ensure_tuple_or_list('order', order)
- self._group_by = _ensure_tuple_or_list('group_by', group_by)
+ self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on)
@property
def project(self):
@@ -287,15 +287,15 @@ def order(self, value):
self._order[:] = value
@property
- def group_by(self):
+ def distinct_on(self):
"""Names of fields used to group query results.
:rtype: sequence of string
"""
- return self._group_by[:]
+ return self._distinct_on[:]
- @group_by.setter
- def group_by(self, value):
+ @distinct_on.setter
+ def distinct_on(self, value):
"""Set fields used to group query results.
:type value: string or sequence of strings
@@ -304,7 +304,7 @@ def group_by(self, value):
"""
if isinstance(value, str):
value = [value]
- self._group_by[:] = value
+ self._distinct_on[:] = value
def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
client=None):
@@ -411,7 +411,7 @@ def next_page(self):
pb.end_cursor = base64.urlsafe_b64decode(end_cursor)
if self._limit is not None:
- pb.limit = self._limit
+ pb.limit.value = self._limit
pb.offset = self._offset
@@ -486,35 +486,33 @@ def _pb_from_query(query):
pb.kind.add().name = query.kind
composite_filter = pb.filter.composite_filter
- composite_filter.operator = _query_pb2.CompositeFilter.AND
+ composite_filter.op = _query_pb2.CompositeFilter.AND
if query.ancestor:
- ancestor_pb = helpers._prepare_key_for_request(
- query.ancestor.to_protobuf())
+ ancestor_pb = query.ancestor.to_protobuf()
# Filter on __key__ HAS_ANCESTOR == ancestor.
- ancestor_filter = composite_filter.filter.add().property_filter
+ ancestor_filter = composite_filter.filters.add().property_filter
ancestor_filter.property.name = '__key__'
- ancestor_filter.operator = _query_pb2.PropertyFilter.HAS_ANCESTOR
+ ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR
ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
for property_name, operator, value in query.filters:
pb_op_enum = query.OPERATORS.get(operator)
# Add the specific filter
- property_filter = composite_filter.filter.add().property_filter
+ property_filter = composite_filter.filters.add().property_filter
property_filter.property.name = property_name
- property_filter.operator = pb_op_enum
+ property_filter.op = pb_op_enum
# Set the value to filter on based on the type.
if property_name == '__key__':
key_pb = value.to_protobuf()
- property_filter.value.key_value.CopyFrom(
- helpers._prepare_key_for_request(key_pb))
+ property_filter.value.key_value.CopyFrom(key_pb)
else:
helpers._set_protobuf_value(property_filter.value, value)
- if not composite_filter.filter:
+ if not composite_filter.filters:
pb.ClearField('filter')
for prop in query.order:
@@ -527,7 +525,7 @@ def _pb_from_query(query):
property_order.property.name = prop
property_order.direction = property_order.ASCENDING
- for group_by_name in query.group_by:
- pb.group_by.add().name = group_by_name
+ for distinct_on_name in query.distinct_on:
+ pb.distinct_on.add().name = distinct_on_name
return pb
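
On the query side, ``group_by`` becomes ``distinct_on`` and the protobuf ``limit`` is now a wrapped ``Int32Value`` (hence ``pb.limit.value``). A hedged sketch, assuming an already-constructed ``client`` and placeholder kind / property names:

    # ``client`` is a gcloud.datastore.Client; 'Task', 'category' and
    # 'priority' are illustrative names only.
    query = client.query(kind='Task', distinct_on=['category'])
    query.order = ['-priority']             # '-' prefix sorts descending
    for task in query.fetch(limit=100):     # limit maps to pb.limit.value
        print(task['category'])
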
diff --git a/gcloud/datastore/test_batch.py b/gcloud/datastore/test_batch.py
index 316070bcfeb1..4636f275979f 100644
--- a/gcloud/datastore/test_batch.py
+++ b/gcloud/datastore/test_batch.py
@@ -38,7 +38,9 @@ def test_ctor(self):
self.assertEqual(batch.namespace, _NAMESPACE)
self.assertTrue(batch._id is None)
self.assertEqual(batch._status, batch._INITIAL)
- self.assertTrue(isinstance(batch.mutations, datastore_pb2.Mutation))
+ self.assertTrue(isinstance(batch._commit_request,
+ datastore_pb2.CommitRequest))
+ self.assertTrue(batch.mutations is batch._commit_request.mutations)
self.assertEqual(batch._partial_key_entities, [])
def test_current(self):
@@ -90,7 +92,7 @@ def test_put_entity_w_partial_key(self):
batch.put(entity)
- mutated_entity = _mutated_pb(self, batch.mutations, 'insert_auto_id')
+ mutated_entity = _mutated_pb(self, batch.mutations, 'insert')
self.assertEqual(mutated_entity.key, key._key)
self.assertEqual(batch._partial_key_entities, [entity])
@@ -118,44 +120,13 @@ def test_put_entity_w_completed_key(self):
prop_dict = dict(_property_tuples(mutated_entity))
self.assertEqual(len(prop_dict), 3)
- self.assertTrue(prop_dict['foo'].indexed)
- self.assertFalse(prop_dict['baz'].indexed)
- self.assertTrue(prop_dict['spam'].indexed)
- self.assertFalse(prop_dict['spam'].list_value[0].indexed)
- self.assertFalse(prop_dict['spam'].list_value[1].indexed)
- self.assertFalse(prop_dict['spam'].list_value[2].indexed)
- self.assertFalse('frotz' in prop_dict)
-
- def test_put_entity_w_completed_key_prefixed_project(self):
- from gcloud.datastore.helpers import _property_tuples
-
- _PROJECT = 'PROJECT'
- _PROPERTIES = {
- 'foo': 'bar',
- 'baz': 'qux',
- 'spam': [1, 2, 3],
- 'frotz': [], # will be ignored
- }
- connection = _Connection()
- client = _Client(_PROJECT, connection)
- batch = self._makeOne(client)
- entity = _Entity(_PROPERTIES)
- entity.exclude_from_indexes = ('baz', 'spam')
- key = entity.key = _Key('s~' + _PROJECT)
-
- batch.put(entity)
-
- mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
- self.assertEqual(mutated_entity.key, key._key)
-
- prop_dict = dict(_property_tuples(mutated_entity))
- self.assertEqual(len(prop_dict), 3)
- self.assertTrue(prop_dict['foo'].indexed)
- self.assertFalse(prop_dict['baz'].indexed)
- self.assertTrue(prop_dict['spam'].indexed)
- self.assertFalse(prop_dict['spam'].list_value[0].indexed)
- self.assertFalse(prop_dict['spam'].list_value[1].indexed)
- self.assertFalse(prop_dict['spam'].list_value[2].indexed)
+ self.assertFalse(prop_dict['foo'].exclude_from_indexes)
+ self.assertTrue(prop_dict['baz'].exclude_from_indexes)
+ self.assertFalse(prop_dict['spam'].exclude_from_indexes)
+ spam_values = prop_dict['spam'].array_value.values
+ self.assertTrue(spam_values[0].exclude_from_indexes)
+ self.assertTrue(spam_values[1].exclude_from_indexes)
+ self.assertTrue(spam_values[2].exclude_from_indexes)
self.assertFalse('frotz' in prop_dict)
def test_delete_w_partial_key(self):
@@ -189,18 +160,6 @@ def test_delete_w_completed_key(self):
mutated_key = _mutated_pb(self, batch.mutations, 'delete')
self.assertEqual(mutated_key, key._key)
- def test_delete_w_completed_key_w_prefixed_project(self):
- _PROJECT = 'PROJECT'
- connection = _Connection()
- client = _Client(_PROJECT, connection)
- batch = self._makeOne(client)
- key = _Key('s~' + _PROJECT)
-
- batch.delete(key)
-
- mutated_key = _mutated_pb(self, batch.mutations, 'delete')
- self.assertEqual(mutated_key, key._key)
-
def test_begin(self):
_PROJECT = 'PROJECT'
client = _Client(_PROJECT, None)
@@ -346,7 +305,7 @@ def __init__(self, id):
class _KeyPB(object):
def __init__(self, id):
- self.path_element = [_PathElementPB(id)]
+ self.path = [_PathElementPB(id)]
class _Connection(object):
@@ -388,9 +347,9 @@ def to_protobuf(self):
from gcloud.datastore._generated import entity_pb2
key = self._key = entity_pb2.Key()
# Don't assign it, because it will just get ripped out
- # key.partition_id.dataset_id = self.project
+ # key.partition_id.project_id = self.project
- element = key.path_element.add()
+ element = key.path.add()
element.kind = self._kind
if self._id is not None:
element.id = self._id
@@ -424,20 +383,18 @@ def current_batch(self):
return self._batches[0]
-def _assert_num_mutations(test_case, mutation_pb, num_mutations):
- total_mutations = (len(mutation_pb.upsert) +
- len(mutation_pb.update) +
- len(mutation_pb.insert) +
- len(mutation_pb.insert_auto_id) +
- len(mutation_pb.delete))
- test_case.assertEqual(total_mutations, num_mutations)
+def _assert_num_mutations(test_case, mutation_pb_list, num_mutations):
+ test_case.assertEqual(len(mutation_pb_list), num_mutations)
-def _mutated_pb(test_case, mutation_pb, mutation_type):
+def _mutated_pb(test_case, mutation_pb_list, mutation_type):
# Make sure there is only one mutation.
- _assert_num_mutations(test_case, mutation_pb, 1)
+ _assert_num_mutations(test_case, mutation_pb_list, 1)
+
+ # We grab the only mutation.
+ mutated_pb = mutation_pb_list[0]
+ # Then check if it is the correct type.
+ test_case.assertEqual(mutated_pb.WhichOneof('operation'),
+ mutation_type)
- mutated_pbs = getattr(mutation_pb, mutation_type, [])
- # Make sure we have exactly one protobuf.
- test_case.assertEqual(len(mutated_pbs), 1)
- return mutated_pbs[0]
+ return getattr(mutated_pb, mutation_type)
diff --git a/gcloud/datastore/test_client.py b/gcloud/datastore/test_client.py
index 90cbbad6183e..a5e4acad608d 100644
--- a/gcloud/datastore/test_client.py
+++ b/gcloud/datastore/test_client.py
@@ -20,8 +20,8 @@ def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
from gcloud.datastore.helpers import _new_value_pb
entity_pb = entity_pb2.Entity()
- entity_pb.key.partition_id.dataset_id = project
- path_element = entity_pb.key.path_element.add()
+ entity_pb.key.partition_id.project_id = project
+ path_element = entity_pb.key.path.add()
path_element.kind = kind
path_element.id = integer_id
if name is not None and str_val is not None:
@@ -31,33 +31,6 @@ def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
return entity_pb
-class Test__get_production_project(unittest2.TestCase):
-
- def _callFUT(self):
- from gcloud.datastore.client import _get_production_project
- return _get_production_project()
-
- def test_no_value(self):
- import os
- from gcloud._testing import _Monkey
-
- environ = {}
- with _Monkey(os, getenv=environ.get):
- project = self._callFUT()
- self.assertEqual(project, None)
-
- def test_value_set(self):
- import os
- from gcloud._testing import _Monkey
- from gcloud.datastore.client import DATASET
-
- MOCK_PROJECT = object()
- environ = {DATASET: MOCK_PROJECT}
- with _Monkey(os, getenv=environ.get):
- project = self._callFUT()
- self.assertEqual(project, MOCK_PROJECT)
-
-
class Test__get_gcd_project(unittest2.TestCase):
def _callFUT(self):
@@ -88,81 +61,58 @@ def test_value_set(self):
class Test__determine_default_project(unittest2.TestCase):
def _callFUT(self, project=None):
- from gcloud.datastore.client import _determine_default_project
+ from gcloud.datastore.client import (
+ _determine_default_project)
return _determine_default_project(project=project)
- def _determine_default_helper(self, prod=None, gcd=None, gae=None,
- gce=None, project=None):
+ def _determine_default_helper(self, gcd=None, fallback=None,
+ project_called=None):
from gcloud._testing import _Monkey
from gcloud.datastore import client
_callers = []
- def prod_mock():
- _callers.append('prod_mock')
- return prod
-
def gcd_mock():
_callers.append('gcd_mock')
return gcd
- def gae_mock():
- _callers.append('gae_mock')
- return gae
-
- def gce_mock():
- _callers.append('gce_mock')
- return gce
+ def fallback_mock(project=None):
+ _callers.append(('fallback_mock', project))
+ return fallback
patched_methods = {
- '_get_production_project': prod_mock,
'_get_gcd_project': gcd_mock,
- '_app_engine_id': gae_mock,
- '_compute_engine_id': gce_mock,
+ '_base_default_project': fallback_mock,
}
with _Monkey(client, **patched_methods):
- returned_project = self._callFUT(project)
+ returned_project = self._callFUT(project_called)
return returned_project, _callers
def test_no_value(self):
project, callers = self._determine_default_helper()
self.assertEqual(project, None)
- self.assertEqual(callers,
- ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock'])
+ self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)])
def test_explicit(self):
PROJECT = object()
project, callers = self._determine_default_helper(
- project=PROJECT)
+ project_called=PROJECT)
self.assertEqual(project, PROJECT)
self.assertEqual(callers, [])
- def test_prod(self):
- PROJECT = object()
- project, callers = self._determine_default_helper(prod=PROJECT)
- self.assertEqual(project, PROJECT)
- self.assertEqual(callers, ['prod_mock'])
-
def test_gcd(self):
PROJECT = object()
project, callers = self._determine_default_helper(gcd=PROJECT)
self.assertEqual(project, PROJECT)
- self.assertEqual(callers, ['prod_mock', 'gcd_mock'])
-
- def test_gae(self):
- PROJECT = object()
- project, callers = self._determine_default_helper(gae=PROJECT)
- self.assertEqual(project, PROJECT)
- self.assertEqual(callers, ['prod_mock', 'gcd_mock', 'gae_mock'])
+ self.assertEqual(callers, ['gcd_mock'])
- def test_gce(self):
+ def test_fallback(self):
PROJECT = object()
- project, callers = self._determine_default_helper(gce=PROJECT)
+ project, callers = self._determine_default_helper(fallback=PROJECT)
self.assertEqual(project, PROJECT)
- self.assertEqual(callers,
- ['prod_mock', 'gcd_mock', 'gae_mock', 'gce_mock'])
+ self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)])
class TestClient(unittest2.TestCase):
@@ -195,7 +145,7 @@ def test_ctor_w_project_no_environ(self):
# Some environments (e.g. AppVeyor CI) run in GCE, so
# this test would fail artificially.
- with _Monkey(_MUT, _compute_engine_id=lambda: None):
+ with _Monkey(_MUT, _base_default_project=lambda project: None):
self.assertRaises(EnvironmentError, self._makeOne, None)
def test_ctor_w_implicit_inputs(self):
@@ -205,10 +155,15 @@ def test_ctor_w_implicit_inputs(self):
OTHER = 'other'
creds = object()
+ default_called = []
+
+ def fallback_mock(project):
+ default_called.append(project)
+ return project or OTHER
klass = self._getTargetClass()
with _Monkey(_MUT,
- _determine_default_project=lambda x: x or OTHER):
+ _determine_default_project=fallback_mock):
with _Monkey(_base_client,
get_credentials=lambda: creds):
client = klass()
@@ -219,6 +174,7 @@ def test_ctor_w_implicit_inputs(self):
self.assertTrue(client.connection.http is None)
self.assertTrue(client.current_batch is None)
self.assertTrue(client.current_transaction is None)
+ self.assertEqual(default_called, [None])
def test_ctor_w_explicit_inputs(self):
OTHER = 'other'
@@ -322,8 +278,8 @@ def test_get_multi_miss_w_missing(self):
# Make a missing entity pb to be returned from mock backend.
missed = entity_pb2.Entity()
- missed.key.partition_id.dataset_id = self.PROJECT
- path_element = missed.key.path_element.add()
+ missed.key.partition_id.project_id = self.PROJECT
+ path_element = missed.key.path.add()
path_element.kind = KIND
path_element.id = ID
@@ -500,55 +456,6 @@ def test_get_multi_hit_multiple_keys_different_project(self):
with self.assertRaises(ValueError):
client.get_multi([key1, key2])
- def test_get_multi_diff_prefixes(self):
- from gcloud.datastore.key import Key
-
- PROJECT1 = 'PROJECT'
- PROJECT2 = 'e~PROJECT'
- PROJECT3 = 's~PROJECT'
- KIND = 'Kind'
- ID1 = 1234
- ID2 = 2345
- ID3 = 3456
-
- # Make found entity pbs to be returned from mock backend.
- entity_pb1 = _make_entity_pb(PROJECT1, KIND, ID1)
- entity_pb2 = _make_entity_pb(PROJECT2, KIND, ID2)
- entity_pb3 = _make_entity_pb(PROJECT3, KIND, ID3)
-
- creds = object()
- client = self._makeOne(credentials=creds)
- client.connection._add_lookup_result([entity_pb1,
- entity_pb2,
- entity_pb3])
-
- key1 = Key(KIND, ID1, project=PROJECT1)
- key2 = Key(KIND, ID2, project=PROJECT2)
- key3 = Key(KIND, ID3, project=PROJECT3)
-
- retrieved_all = client.get_multi([key1, key2, key3])
- retrieved1, retrieved2, retrieved3 = retrieved_all
-
- # Check values & keys match.
- self.assertEqual(retrieved1.key.path, key1.path)
- self.assertEqual(retrieved2.key.path, key2.path)
- self.assertEqual(retrieved3.key.path, key3.path)
-
- def test_get_multi_diff_projects_w_prefix(self):
- from gcloud.datastore.key import Key
-
- PROJECT1 = 'e~PROJECT'
- PROJECT2 = 's~PROJECT-ALT'
-
- key1 = Key('KIND', 1234, project=PROJECT1)
- key2 = Key('KIND', 1234, project=PROJECT2)
-
- creds = object()
- client = self._makeOne(credentials=creds)
-
- with self.assertRaises(ValueError):
- client.get_multi([key1, key2])
-
def test_get_multi_max_loops(self):
from gcloud._testing import _Monkey
from gcloud.datastore import client as _MUT
@@ -612,6 +519,7 @@ def test_put_multi_no_batch_w_partial_key(self):
from gcloud.datastore.test_batch import _Entity
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _KeyPB
+ from gcloud.datastore.test_batch import _mutated_pb
entity = _Entity(foo=u'bar')
key = entity.key = _Key(self.PROJECT)
@@ -628,15 +536,16 @@ def test_put_multi_no_batch_w_partial_key(self):
(project,
commit_req, transaction_id) = client.connection._commit_cw[0]
self.assertEqual(project, self.PROJECT)
- inserts = list(commit_req.mutation.insert_auto_id)
- self.assertEqual(len(inserts), 1)
- self.assertEqual(inserts[0].key, key.to_protobuf())
- prop_list = list(_property_tuples(inserts[0]))
+ mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert')
+ self.assertEqual(mutated_entity.key, key.to_protobuf())
+
+ prop_list = list(_property_tuples(mutated_entity))
self.assertTrue(len(prop_list), 1)
name, value_pb = prop_list[0]
self.assertEqual(name, 'foo')
self.assertEqual(value_pb.string_value, u'bar')
+
self.assertTrue(transaction_id is None)
def test_put_multi_existing_batch_w_completed_key(self):
@@ -688,6 +597,7 @@ def test_delete_multi_no_keys(self):
def test_delete_multi_no_batch(self):
from gcloud.datastore.test_batch import _Key
+ from gcloud.datastore.test_batch import _mutated_pb
key = _Key(self.PROJECT)
@@ -701,7 +611,9 @@ def test_delete_multi_no_batch(self):
(project,
commit_req, transaction_id) = client.connection._commit_cw[0]
self.assertEqual(project, self.PROJECT)
- self.assertEqual(list(commit_req.mutation.delete), [key.to_protobuf()])
+
+ mutated_key = _mutated_pb(self, commit_req.mutations, 'delete')
+ self.assertEqual(mutated_key, key.to_protobuf())
self.assertTrue(transaction_id is None)
def test_delete_multi_w_existing_batch(self):
@@ -909,7 +821,7 @@ def test_query_explicit(self):
FILTERS = [('PROPERTY', '==', 'VALUE')]
PROJECTION = ['__key__']
ORDER = ['PROPERTY']
- GROUP_BY = ['GROUPBY']
+ DISTINCT_ON = ['DISTINCT_ON']
creds = object()
client = self._makeOne(credentials=creds)
@@ -922,7 +834,7 @@ def test_query_explicit(self):
filters=FILTERS,
projection=PROJECTION,
order=ORDER,
- group_by=GROUP_BY,
+ distinct_on=DISTINCT_ON,
)
self.assertTrue(isinstance(query, _Dummy))
@@ -935,7 +847,7 @@ def test_query_explicit(self):
'filters': FILTERS,
'projection': PROJECTION,
'order': ORDER,
- 'group_by': GROUP_BY,
+ 'distinct_on': DISTINCT_ON,
}
self.assertEqual(query.kwargs, kwargs)
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 16b3c6ae289c..ab8beacc24e9 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -47,10 +47,9 @@ def _verifyProtobufCall(self, called_with, URI, conn):
conn.USER_AGENT)
def test_default_url(self):
- from gcloud.connection import API_BASE_URL
-
+ klass = self._getTargetClass()
conn = self._makeOne()
- self.assertEqual(conn.api_base_url, API_BASE_URL)
+ self.assertEqual(conn.api_base_url, klass.API_BASE_URL)
def test_custom_url_from_env(self):
import os
@@ -143,11 +142,9 @@ def test__request_w_200(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- METHOD,
+ 'projects',
+ PROJECT + ':' + METHOD,
])
http = conn._http = Http({'status': '200'}, 'CONTENT')
self.assertEqual(conn._request(PROJECT, METHOD, DATA), 'CONTENT')
@@ -156,12 +153,17 @@ def test__request_w_200(self):
def test__request_not_200(self):
from gcloud.exceptions import BadRequest
+ from google.rpc import status_pb2
+
+ error = status_pb2.Status()
+ error.message = 'Entity value is indexed.'
+ error.code = 9 # FAILED_PRECONDITION
PROJECT = 'PROJECT'
METHOD = 'METHOD'
DATA = 'DATA'
conn = self._makeOne()
- conn._http = Http({'status': '400'}, b'Entity value is indexed.')
+ conn._http = Http({'status': '400'}, error.SerializeToString())
with self.assertRaises(BadRequest) as e:
conn._request(PROJECT, METHOD, DATA)
expected_message = '400 Entity value is indexed.'
@@ -189,11 +191,9 @@ def FromString(cls, pb):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- METHOD,
+ 'projects',
+ PROJECT + ':' + METHOD,
])
http = conn._http = Http({'status': '200'}, 'CONTENT')
response = conn._rpc(PROJECT, METHOD, ReqPB(), RspPB)
@@ -208,11 +208,9 @@ def test_build_api_url_w_default_base_version(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- METHOD,
+ 'projects',
+ PROJECT + ':' + METHOD,
])
self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI)
@@ -224,11 +222,9 @@ def test_build_api_url_w_explicit_base_version(self):
conn = self._makeOne()
URI = '/'.join([
BASE,
- 'datastore',
VER,
- 'datasets',
- PROJECT,
- METHOD,
+ 'projects',
+ PROJECT + ':' + METHOD,
])
self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER),
URI)
@@ -242,11 +238,9 @@ def test_lookup_single_key_empty_response(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
found, missing, deferred = conn.lookup(PROJECT, [key_pb])
@@ -258,9 +252,9 @@ def test_lookup_single_key_empty_response(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
def test_lookup_single_key_empty_response_w_eventual(self):
from gcloud.datastore._generated import datastore_pb2
@@ -271,11 +265,9 @@ def test_lookup_single_key_empty_response_w_eventual(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
found, missing, deferred = conn.lookup(PROJECT, [key_pb],
@@ -288,9 +280,9 @@ def test_lookup_single_key_empty_response_w_eventual(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
self.assertEqual(request.read_options.read_consistency,
datastore_pb2.ReadOptions.EVENTUAL)
self.assertEqual(request.read_options.transaction, b'')
@@ -313,11 +305,9 @@ def test_lookup_single_key_empty_response_w_transaction(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
found, missing, deferred = conn.lookup(PROJECT, [key_pb],
@@ -330,9 +320,9 @@ def test_lookup_single_key_empty_response_w_transaction(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
self.assertEqual(request.read_options.transaction, TRANSACTION)
def test_lookup_single_key_nonempty_response(self):
@@ -348,26 +338,24 @@ def test_lookup_single_key_nonempty_response(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
(found,), missing, deferred = conn.lookup(PROJECT, [key_pb])
self.assertEqual(len(missing), 0)
self.assertEqual(len(deferred), 0)
- self.assertEqual(found.key.path_element[0].kind, 'Kind')
- self.assertEqual(found.key.path_element[0].id, 1234)
+ self.assertEqual(found.key.path[0].kind, 'Kind')
+ self.assertEqual(found.key.path[0].id, 1234)
cw = http._called_with
self._verifyProtobufCall(cw, URI, conn)
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 1)
- _compare_key_pb_after_request(self, key_pb, keys[0])
+ self.assertEqual(key_pb, keys[0])
def test_lookup_multiple_keys_empty_response(self):
from gcloud.datastore._generated import datastore_pb2
@@ -379,11 +367,9 @@ def test_lookup_multiple_keys_empty_response(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
@@ -395,10 +381,10 @@ def test_lookup_multiple_keys_empty_response(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])
def test_lookup_multiple_keys_w_missing(self):
from gcloud.datastore._generated import datastore_pb2
@@ -414,11 +400,9 @@ def test_lookup_multiple_keys_w_missing(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
@@ -431,10 +415,10 @@ def test_lookup_multiple_keys_w_missing(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])
def test_lookup_multiple_keys_w_deferred(self):
from gcloud.datastore._generated import datastore_pb2
@@ -448,11 +432,9 @@ def test_lookup_multiple_keys_w_deferred(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'lookup',
+ 'projects',
+ PROJECT + ':lookup',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
@@ -469,10 +451,10 @@ def test_lookup_multiple_keys_w_deferred(self):
rq_class = datastore_pb2.LookupRequest
request = rq_class()
request.ParseFromString(cw['body'])
- keys = list(request.key)
+ keys = list(request.keys)
self.assertEqual(len(keys), 2)
- _compare_key_pb_after_request(self, key_pb1, keys[0])
- _compare_key_pb_after_request(self, key_pb2, keys[1])
+ self.assertEqual(key_pb1, keys[0])
+ self.assertEqual(key_pb2, keys[1])
def test_run_query_w_eventual_no_transaction(self):
from gcloud.datastore._generated import datastore_pb2
@@ -490,11 +472,9 @@ def test_run_query_w_eventual_no_transaction(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'runQuery',
+ 'projects',
+ PROJECT + ':runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs, end, more, skipped = conn.run_query(PROJECT, q_pb,
@@ -508,7 +488,7 @@ def test_run_query_w_eventual_no_transaction(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.partition_id.namespace, '')
+ self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)
self.assertEqual(request.read_options.read_consistency,
datastore_pb2.ReadOptions.EVENTUAL)
@@ -531,11 +511,9 @@ def test_run_query_wo_eventual_w_transaction(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'runQuery',
+ 'projects',
+ PROJECT + ':runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs, end, more, skipped = conn.run_query(
@@ -549,10 +527,11 @@ def test_run_query_wo_eventual_w_transaction(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.partition_id.namespace, '')
+ self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)
- self.assertEqual(request.read_options.read_consistency,
- datastore_pb2.ReadOptions.DEFAULT)
+ self.assertEqual(
+ request.read_options.read_consistency,
+ datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED)
self.assertEqual(request.read_options.transaction, TRANSACTION)
def test_run_query_w_eventual_and_transaction(self):
@@ -589,11 +568,9 @@ def test_run_query_wo_namespace_empty_result(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'runQuery',
+ 'projects',
+ PROJECT + ':runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs, end, more, skipped = conn.run_query(PROJECT, q_pb)
@@ -606,7 +583,7 @@ def test_run_query_wo_namespace_empty_result(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.partition_id.namespace, '')
+ self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)
def test_run_query_w_namespace_nonempty_result(self):
@@ -618,17 +595,15 @@ def test_run_query_w_namespace_nonempty_result(self):
entity = entity_pb2.Entity()
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb2.RunQueryResponse()
- rsp_pb.batch.entity_result.add(entity=entity)
+ rsp_pb.batch.entity_results.add(entity=entity)
rsp_pb.batch.entity_result_type = 1 # FULL
rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'runQuery',
+ 'projects',
+ PROJECT + ':runQuery',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
pbs = conn.run_query(PROJECT, q_pb, 'NS')[0]
@@ -638,7 +613,7 @@ def test_run_query_w_namespace_nonempty_result(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.partition_id.namespace, 'NS')
+ self.assertEqual(request.partition_id.namespace_id, 'NS')
self.assertEqual(request.query, q_pb)
def test_begin_transaction(self):
@@ -651,11 +626,9 @@ def test_begin_transaction(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'beginTransaction',
+ 'projects',
+ PROJECT + ':beginTransaction',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION)
@@ -664,7 +637,6 @@ def test_begin_transaction(self):
rq_class = datastore_pb2.BeginTransactionRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.isolation_level, rq_class.SERIALIZABLE)
def test_commit_wo_transaction(self):
from gcloud._testing import _Monkey
@@ -676,19 +648,17 @@ def test_commit_wo_transaction(self):
key_pb = self._make_key_pb(PROJECT)
rsp_pb = datastore_pb2.CommitResponse()
req_pb = datastore_pb2.CommitRequest()
- mutation = req_pb.mutation
- insert = mutation.upsert.add()
+ mutation = req_pb.mutations.add()
+ insert = mutation.upsert
insert.key.CopyFrom(key_pb)
value_pb = _new_value_pb(insert, 'foo')
value_pb.string_value = u'Foo'
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'commit',
+ 'projects',
+ PROJECT + ':commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
@@ -710,7 +680,7 @@ def mock_parse(response):
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, b'')
- self.assertEqual(request.mutation, mutation)
+ self.assertEqual(list(request.mutations), [mutation])
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
self.assertEqual(_parsed, [rsp_pb])
@@ -724,19 +694,17 @@ def test_commit_w_transaction(self):
key_pb = self._make_key_pb(PROJECT)
rsp_pb = datastore_pb2.CommitResponse()
req_pb = datastore_pb2.CommitRequest()
- mutation = req_pb.mutation
- insert = mutation.upsert.add()
+ mutation = req_pb.mutations.add()
+ insert = mutation.upsert
insert.key.CopyFrom(key_pb)
value_pb = _new_value_pb(insert, 'foo')
value_pb.string_value = u'Foo'
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'commit',
+ 'projects',
+ PROJECT + ':commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
@@ -758,7 +726,7 @@ def mock_parse(response):
request = rq_class()
request.ParseFromString(cw['body'])
self.assertEqual(request.transaction, b'xact')
- self.assertEqual(request.mutation, mutation)
+ self.assertEqual(list(request.mutations), [mutation])
self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
self.assertEqual(_parsed, [rsp_pb])
@@ -771,11 +739,9 @@ def test_rollback_ok(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'rollback',
+ 'projects',
+ PROJECT + ':rollback',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.rollback(PROJECT, TRANSACTION), None)
@@ -794,11 +760,9 @@ def test_allocate_ids_empty(self):
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'allocateIds',
+ 'projects',
+ PROJECT + ':allocateIds',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.allocate_ids(PROJECT, []), [])
@@ -807,7 +771,7 @@ def test_allocate_ids_empty(self):
rq_class = datastore_pb2.AllocateIdsRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(list(request.key), [])
+ self.assertEqual(list(request.keys), [])
def test_allocate_ids_non_empty(self):
from gcloud.datastore._generated import datastore_pb2
@@ -822,16 +786,14 @@ def test_allocate_ids_non_empty(self):
self._make_key_pb(PROJECT, id=2345),
]
rsp_pb = datastore_pb2.AllocateIdsResponse()
- rsp_pb.key.add().CopyFrom(after_key_pbs[0])
- rsp_pb.key.add().CopyFrom(after_key_pbs[1])
+ rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
+ rsp_pb.keys.add().CopyFrom(after_key_pbs[1])
conn = self._makeOne()
URI = '/'.join([
conn.api_base_url,
- 'datastore',
conn.API_VERSION,
- 'datasets',
- PROJECT,
- 'allocateIds',
+ 'projects',
+ PROJECT + ':allocateIds',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs),
@@ -841,9 +803,9 @@ def test_allocate_ids_non_empty(self):
rq_class = datastore_pb2.AllocateIdsRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(len(request.key), len(before_key_pbs))
- for key_before, key_after in zip(before_key_pbs, request.key):
- _compare_key_pb_after_request(self, key_before, key_after)
+ self.assertEqual(len(request.keys), len(before_key_pbs))
+ for key_before, key_after in zip(before_key_pbs, request.keys):
+ self.assertEqual(key_before, key_after)
class Test__parse_commit_response(unittest2.TestCase):
@@ -859,7 +821,7 @@ def test_it(self):
index_updates = 1337
keys = [
entity_pb2.Key(
- path_element=[
+ path=[
entity_pb2.Key.PathElement(
kind='Foo',
id=1234,
@@ -867,7 +829,7 @@ def test_it(self):
],
),
entity_pb2.Key(
- path_element=[
+ path=[
entity_pb2.Key.PathElement(
kind='Bar',
name='baz',
@@ -876,10 +838,10 @@ def test_it(self):
),
]
response = datastore_pb2.CommitResponse(
- mutation_result=datastore_pb2.MutationResult(
- index_updates=index_updates,
- insert_auto_id_key=keys,
- ),
+ mutation_results=[
+ datastore_pb2.MutationResult(key=key) for key in keys
+ ],
+ index_updates=index_updates,
)
result = self._callFUT(response)
self.assertEqual(result, (index_updates, keys))
@@ -899,17 +861,6 @@ def request(self, **kw):
return self._response, self._content
-def _compare_key_pb_after_request(test, key_before, key_after):
- # Unset values are False-y.
- test.assertEqual(key_after.partition_id.dataset_id, '')
- test.assertEqual(key_before.partition_id.namespace,
- key_after.partition_id.namespace)
- test.assertEqual(len(key_before.path_element),
- len(key_after.path_element))
- for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
- test.assertEqual(elt1, elt2)
-
-
class _PathElementProto(object):
def __init__(self, _id):
@@ -919,4 +870,4 @@ def __init__(self, _id):
class _KeyProto(object):
def __init__(self, id_):
- self.path_element = [_PathElementProto(id_)]
+ self.path = [_PathElementProto(id_)]
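The connection tests above encode two v1beta3 changes: request URLs now take the form ``<base>/<version>/projects/<project>:<method>`` instead of ``<base>/datastore/<version>/datasets/<project>/<method>``, and non-200 responses carry a serialized ``google.rpc.Status`` proto rather than a plain-text body. An illustrative sketch of both, not the library's actual implementation (that lives in ``gcloud/datastore/connection.py``)::

    from google.rpc import status_pb2


    def build_api_url(api_base_url, api_version, project, method):
        # v1beta3-style URL: .../<version>/projects/<project>:<method>
        return '/'.join([api_base_url, api_version,
                         'projects', project + ':' + method])


    def error_message(status_code, response_body):
        # Error bodies are serialized google.rpc.Status messages.
        error = status_pb2.Status()
        error.ParseFromString(response_body)
        return '%d %s' % (status_code, error.message)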
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index 7d86382e7f32..caa5e9cec2b1 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -29,9 +29,8 @@ def test_it(self):
result = self._callFUT(entity_pb, name)
self.assertTrue(isinstance(result, entity_pb2.Value))
- self.assertEqual(len(entity_pb.property), 1)
- self.assertEqual(entity_pb.property[0].name, name)
- self.assertEqual(entity_pb.property[0].value, result)
+ self.assertEqual(len(entity_pb.properties), 1)
+ self.assertEqual(entity_pb.properties[name], result)
class Test__property_tuples(unittest2.TestCase):
@@ -53,7 +52,8 @@ def test_it(self):
result = self._callFUT(entity_pb)
self.assertTrue(isinstance(result, types.GeneratorType))
- self.assertEqual(list(result), [(name1, val_pb1), (name2, val_pb2)])
+ self.assertEqual(sorted(result),
+ sorted([(name1, val_pb1), (name2, val_pb2)]))
class Test_entity_from_protobuf(unittest2.TestCase):
@@ -70,29 +70,28 @@ def test_it(self):
_KIND = 'KIND'
_ID = 1234
entity_pb = entity_pb2.Entity()
- entity_pb.key.partition_id.dataset_id = _PROJECT
- entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+ entity_pb.key.partition_id.project_id = _PROJECT
+ entity_pb.key.path.add(kind=_KIND, id=_ID)
value_pb = _new_value_pb(entity_pb, 'foo')
value_pb.string_value = 'Foo'
unindexed_val_pb = _new_value_pb(entity_pb, 'bar')
unindexed_val_pb.integer_value = 10
- unindexed_val_pb.indexed = False
+ unindexed_val_pb.exclude_from_indexes = True
- list_val_pb1 = _new_value_pb(entity_pb, 'baz')
- list_pb1 = list_val_pb1.list_value
+ array_val_pb1 = _new_value_pb(entity_pb, 'baz')
+ array_pb1 = array_val_pb1.array_value.values
- unindexed_list_val_pb = list_pb1.add()
- unindexed_list_val_pb.integer_value = 11
- unindexed_list_val_pb.indexed = False
+ unindexed_array_val_pb = array_pb1.add()
+ unindexed_array_val_pb.integer_value = 11
+ unindexed_array_val_pb.exclude_from_indexes = True
- list_val_pb2 = _new_value_pb(entity_pb, 'qux')
- list_pb2 = list_val_pb2.list_value
+ array_val_pb2 = _new_value_pb(entity_pb, 'qux')
+ array_pb2 = array_val_pb2.array_value.values
- indexed_list_val_pb = list_pb2.add()
- indexed_list_val_pb.integer_value = 12
- indexed_list_val_pb.indexed = True
+ indexed_array_val_pb = array_pb2.add()
+ indexed_array_val_pb.integer_value = 12
entity = self._callFUT(entity_pb)
self.assertEqual(entity.kind, _KIND)
@@ -117,19 +116,18 @@ def test_mismatched_value_indexed(self):
_KIND = 'KIND'
_ID = 1234
entity_pb = entity_pb2.Entity()
- entity_pb.key.partition_id.dataset_id = _PROJECT
- entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+ entity_pb.key.partition_id.project_id = _PROJECT
+ entity_pb.key.path.add(kind=_KIND, id=_ID)
- list_val_pb = _new_value_pb(entity_pb, 'baz')
- list_pb = list_val_pb.list_value
+ array_val_pb = _new_value_pb(entity_pb, 'baz')
+ array_pb = array_val_pb.array_value.values
- unindexed_value_pb1 = list_pb.add()
+ unindexed_value_pb1 = array_pb.add()
unindexed_value_pb1.integer_value = 10
- unindexed_value_pb1.indexed = False
+ unindexed_value_pb1.exclude_from_indexes = True
- unindexed_value_pb2 = list_pb.add()
+ unindexed_value_pb2 = array_pb.add()
unindexed_value_pb2.integer_value = 11
- unindexed_value_pb2.indexed = True
with self.assertRaises(ValueError):
self._callFUT(entity_pb)
@@ -162,7 +160,7 @@ def test_nested_entity_no_key(self):
from gcloud.datastore._generated import entity_pb2
from gcloud.datastore.helpers import _new_value_pb
- PROJECT = 's~FOO'
+ PROJECT = 'FOO'
KIND = 'KIND'
INSIDE_NAME = 'IFOO'
OUTSIDE_NAME = 'OBAR'
@@ -173,8 +171,8 @@ def test_nested_entity_no_key(self):
inside_val_pb.integer_value = INSIDE_VALUE
entity_pb = entity_pb2.Entity()
- entity_pb.key.partition_id.dataset_id = PROJECT
- element = entity_pb.key.path_element.add()
+ entity_pb.key.partition_id.project_id = PROJECT
+ element = entity_pb.key.path.add()
element.kind = KIND
outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME)
@@ -235,8 +233,8 @@ def test_key_only(self):
entity_pb = self._callFUT(entity)
expected_pb = entity_pb2.Entity()
- expected_pb.key.partition_id.dataset_id = project
- path_elt = expected_pb.key.path_element.add()
+ expected_pb.key.partition_id.project_id = project
+ path_elt = expected_pb.key.path.add()
path_elt.kind = kind
path_elt.name = name
@@ -279,18 +277,18 @@ def test_inverts_to_protobuf(self):
original_pb = entity_pb2.Entity()
# Add a key.
- original_pb.key.partition_id.dataset_id = project = 'PROJECT'
- elem1 = original_pb.key.path_element.add()
+ original_pb.key.partition_id.project_id = project = 'PROJECT'
+ elem1 = original_pb.key.path.add()
elem1.kind = 'Family'
elem1.id = 1234
- elem2 = original_pb.key.path_element.add()
+ elem2 = original_pb.key.path.add()
elem2.kind = 'King'
elem2.name = 'Spades'
# Add an integer property.
val_pb1 = _new_value_pb(original_pb, 'foo')
val_pb1.integer_value = 1337
- val_pb1.indexed = False
+ val_pb1.exclude_from_indexes = True
# Add a string property.
val_pb2 = _new_value_pb(original_pb, 'bar')
val_pb2.string_value = u'hello'
@@ -307,14 +305,14 @@ def test_inverts_to_protobuf(self):
# Add a list property.
val_pb4 = _new_value_pb(original_pb, 'list-quux')
- list_val1 = val_pb4.list_value.add()
- list_val1.indexed = False
- list_val1.meaning = meaning = 22
- list_val1.blob_value = b'\xe2\x98\x83'
- list_val2 = val_pb4.list_value.add()
- list_val2.indexed = False
- list_val2.meaning = meaning
- list_val2.blob_value = b'\xe2\x98\x85'
+ array_val1 = val_pb4.array_value.values.add()
+ array_val1.exclude_from_indexes = False
+ array_val1.meaning = meaning = 22
+ array_val1.blob_value = b'\xe2\x98\x83'
+ array_val2 = val_pb4.array_value.values.add()
+ array_val2.exclude_from_indexes = False
+ array_val2.meaning = meaning
+ array_val2.blob_value = b'\xe2\x98\x85'
# Convert to the user-space Entity.
entity = entity_from_protobuf(original_pb)
@@ -322,7 +320,7 @@ def test_inverts_to_protobuf(self):
new_pb = self._callFUT(entity)
# NOTE: entity_to_protobuf() strips the project so we "cheat".
- new_pb.key.partition_id.dataset_id = project
+ new_pb.key.partition_id.project_id = project
self._compareEntityProto(original_pb, new_pb)
def test_meaning_with_change(self):
@@ -355,11 +353,11 @@ def _makePB(self, project=None, namespace=None, path=()):
from gcloud.datastore._generated import entity_pb2
pb = entity_pb2.Key()
if project is not None:
- pb.partition_id.dataset_id = project
+ pb.partition_id.project_id = project
if namespace is not None:
- pb.partition_id.namespace = namespace
+ pb.partition_id.namespace_id = namespace
for elem in path:
- added = pb.path_element.add()
+ added = pb.path.add()
added.kind = elem['kind']
if 'id' in elem:
added.id = elem['id']
@@ -410,23 +408,25 @@ def test_datetime_naive(self):
import datetime
from gcloud._helpers import UTC
- naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375) # No zone.
- utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, UTC)
+ micros = 4375
+ naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros) # No zone.
+ utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
name, value = self._callFUT(naive)
- self.assertEqual(name, 'timestamp_microseconds_value')
- self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
- self.assertEqual(value % 1000000, 4375)
+ self.assertEqual(name, 'timestamp_value')
+ self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value.nanos, 1000 * micros)
def test_datetime_w_zone(self):
import calendar
import datetime
from gcloud._helpers import UTC
- utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, UTC)
+ micros = 4375
+ utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
name, value = self._callFUT(utc)
- self.assertEqual(name, 'timestamp_microseconds_value')
- self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
- self.assertEqual(value % 1000000, 4375)
+ self.assertEqual(name, 'timestamp_value')
+ self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value.nanos, 1000 * micros)
def test_key(self):
from gcloud.datastore.key import Key
@@ -483,12 +483,31 @@ def test_entity(self):
self.assertEqual(name, 'entity_value')
self.assertTrue(value is entity)
- def test_list(self):
+ def test_array(self):
values = ['a', 0, 3.14]
name, value = self._callFUT(values)
- self.assertEqual(name, 'list_value')
+ self.assertEqual(name, 'array_value')
self.assertTrue(value is values)
+ def test_geo_point(self):
+ from google.type import latlng_pb2
+ from gcloud.datastore.helpers import GeoPoint
+
+ lat = 42.42
+ lng = 99.0007
+ geo_pt = GeoPoint(latitude=lat, longitude=lng)
+ geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+ name, value = self._callFUT(geo_pt)
+ self.assertEqual(name, 'geo_point_value')
+ self.assertEqual(value, geo_pt_pb)
+
+ def test_null(self):
+ from google.protobuf import struct_pb2
+
+ name, value = self._callFUT(None)
+ self.assertEqual(name, 'null_value')
+ self.assertEqual(value, struct_pb2.NULL_VALUE)
+
def test_object(self):
self.assertRaises(ValueError, self._callFUT, object())
@@ -511,10 +530,13 @@ def test_datetime(self):
import calendar
import datetime
from gcloud._helpers import UTC
+ from gcloud.datastore._generated import entity_pb2
- utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, UTC)
- micros = (calendar.timegm(utc.timetuple()) * 1000000) + 4375
- pb = self._makePB('timestamp_microseconds_value', micros)
+ micros = 4375
+ utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
+ pb = entity_pb2.Value()
+ pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
+ pb.timestamp_value.nanos = 1000 * micros
self.assertEqual(self._callFUT(pb), utc)
def test_key(self):
@@ -554,8 +576,8 @@ def test_entity(self):
pb = entity_pb2.Value()
entity_pb = pb.entity_value
- entity_pb.key.path_element.add(kind='KIND')
- entity_pb.key.partition_id.dataset_id = 'PROJECT'
+ entity_pb.key.path.add(kind='KIND')
+ entity_pb.key.partition_id.project_id = 'PROJECT'
value_pb = _new_value_pb(entity_pb, 'foo')
value_pb.string_value = 'Foo'
@@ -563,23 +585,46 @@ def test_entity(self):
self.assertTrue(isinstance(entity, Entity))
self.assertEqual(entity['foo'], 'Foo')
- def test_list(self):
+ def test_array(self):
from gcloud.datastore._generated import entity_pb2
pb = entity_pb2.Value()
- list_pb = pb.list_value
- item_pb = list_pb.add()
+ array_pb = pb.array_value.values
+ item_pb = array_pb.add()
item_pb.string_value = 'Foo'
- item_pb = list_pb.add()
+ item_pb = array_pb.add()
item_pb.string_value = 'Bar'
items = self._callFUT(pb)
self.assertEqual(items, ['Foo', 'Bar'])
+ def test_geo_point(self):
+ from google.type import latlng_pb2
+ from gcloud.datastore._generated import entity_pb2
+ from gcloud.datastore.helpers import GeoPoint
+
+ lat = -3.14
+ lng = 13.37
+ geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+ pb = entity_pb2.Value(geo_point_value=geo_pt_pb)
+ result = self._callFUT(pb)
+ self.assertIsInstance(result, GeoPoint)
+ self.assertEqual(result.latitude, lat)
+ self.assertEqual(result.longitude, lng)
+
+ def test_null(self):
+ from google.protobuf import struct_pb2
+ from gcloud.datastore._generated import entity_pb2
+
+ pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
+ result = self._callFUT(pb)
+ self.assertIsNone(result)
+
def test_unknown(self):
from gcloud.datastore._generated import entity_pb2
pb = entity_pb2.Value()
- self.assertEqual(self._callFUT(pb), None)
+ with self.assertRaises(ValueError):
+ self._callFUT(pb)
class Test_set_protobuf_value(unittest2.TestCase):
@@ -599,11 +644,12 @@ def test_datetime(self):
from gcloud._helpers import UTC
pb = self._makePB()
- utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, UTC)
+ micros = 4375
+ utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
self._callFUT(pb, utc)
- value = pb.timestamp_microseconds_value
- self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
- self.assertEqual(value % 1000000, 4375)
+ value = pb.timestamp_value
+ self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value.nanos, 1000 * micros)
def test_key(self):
from gcloud.datastore.key import Key
@@ -615,23 +661,9 @@ def test_key(self):
self.assertEqual(value, key.to_protobuf())
def test_none(self):
- from gcloud.datastore.entity import Entity
-
- entity = Entity()
pb = self._makePB()
-
- self._callFUT(pb, False)
- self._callFUT(pb, 3.1415926)
- self._callFUT(pb, 42)
- self._callFUT(pb, (1 << 63) - 1)
- self._callFUT(pb, 'str')
- self._callFUT(pb, b'str')
- self._callFUT(pb, u'str')
- self._callFUT(pb, entity)
- self._callFUT(pb, [u'a', 0, 3.14])
-
self._callFUT(pb, None)
- self.assertEqual(len(pb.ListFields()), 0)
+ self.assertEqual(pb.WhichOneof('value_type'), 'null_value')
def test_bool(self):
pb = self._makePB()
@@ -711,95 +743,27 @@ def test_entity_w_key(self):
self.assertEqual(list(prop_dict.keys()), [name])
self.assertEqual(prop_dict[name].string_value, value)
- def test_list(self):
+ def test_array(self):
pb = self._makePB()
values = [u'a', 0, 3.14]
self._callFUT(pb, values)
- marshalled = pb.list_value
+ marshalled = pb.array_value.values
self.assertEqual(len(marshalled), len(values))
self.assertEqual(marshalled[0].string_value, values[0])
self.assertEqual(marshalled[1].integer_value, values[1])
self.assertEqual(marshalled[2].double_value, values[2])
+ def test_geo_point(self):
+ from google.type import latlng_pb2
+ from gcloud.datastore.helpers import GeoPoint
-class Test__prepare_key_for_request(unittest2.TestCase):
-
- def _callFUT(self, key_pb):
- from gcloud.datastore.helpers import _prepare_key_for_request
-
- return _prepare_key_for_request(key_pb)
-
- def test_prepare_project_valid(self):
- from gcloud.datastore._generated import entity_pb2
- key = entity_pb2.Key()
- key.partition_id.dataset_id = 'foo'
- new_key = self._callFUT(key)
- self.assertFalse(new_key is key)
-
- key_without = entity_pb2.Key()
- new_key.ClearField('partition_id')
- self.assertEqual(new_key, key_without)
-
- def test_prepare_project_unset(self):
- from gcloud.datastore._generated import entity_pb2
- key = entity_pb2.Key()
- new_key = self._callFUT(key)
- self.assertTrue(new_key is key)
-
-
-class Test_find_true_project(unittest2.TestCase):
-
- def _callFUT(self, project, connection):
- from gcloud.datastore.helpers import find_true_project
- return find_true_project(project, connection)
-
- def test_prefixed(self):
- PREFIXED = 's~PROJECT'
- result = self._callFUT(PREFIXED, object())
- self.assertEqual(PREFIXED, result)
-
- def test_unprefixed_bogus_key_miss(self):
- UNPREFIXED = 'PROJECT'
- PREFIX = 's~'
- CONNECTION = _Connection(PREFIX, from_missing=False)
- result = self._callFUT(UNPREFIXED, CONNECTION)
-
- self.assertEqual(CONNECTION._called_project, UNPREFIXED)
-
- self.assertEqual(len(CONNECTION._lookup_result), 1)
-
- # Make sure just one.
- called_key_pb, = CONNECTION._called_key_pbs
- path_element = called_key_pb.path_element
- self.assertEqual(len(path_element), 1)
- self.assertEqual(path_element[0].kind, '__MissingLookupKind')
- self.assertEqual(path_element[0].id, 1)
- # Unset values are False-y.
- self.assertEqual(path_element[0].name, '')
-
- PREFIXED = PREFIX + UNPREFIXED
- self.assertEqual(result, PREFIXED)
-
- def test_unprefixed_bogus_key_hit(self):
- UNPREFIXED = 'PROJECT'
- PREFIX = 'e~'
- CONNECTION = _Connection(PREFIX, from_missing=True)
- result = self._callFUT(UNPREFIXED, CONNECTION)
-
- self.assertEqual(CONNECTION._called_project, UNPREFIXED)
- self.assertEqual(CONNECTION._lookup_result, [])
-
- # Make sure just one.
- called_key_pb, = CONNECTION._called_key_pbs
- path_element = called_key_pb.path_element
- self.assertEqual(len(path_element), 1)
- self.assertEqual(path_element[0].kind, '__MissingLookupKind')
- self.assertEqual(path_element[0].id, 1)
- # Unset values are False-y.
- self.assertEqual(path_element[0].name, '')
-
- PREFIXED = PREFIX + UNPREFIXED
- self.assertEqual(result, PREFIXED)
+ pb = self._makePB()
+ lat = 9.11
+ lng = 3.337
+ geo_pt = GeoPoint(latitude=lat, longitude=lng)
+ geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+ self._callFUT(pb, geo_pt)
+ self.assertEqual(pb.geo_point_value, geo_pt_pb)
class Test__get_meaning(unittest2.TestCase):
@@ -824,23 +788,23 @@ def test_single(self):
result = self._callFUT(value_pb)
self.assertEqual(meaning, result)
- def test_empty_list_value(self):
+ def test_empty_array_value(self):
from gcloud.datastore._generated import entity_pb2
value_pb = entity_pb2.Value()
- value_pb.list_value.add()
- value_pb.list_value.pop()
+ value_pb.array_value.values.add()
+ value_pb.array_value.values.pop()
result = self._callFUT(value_pb, is_list=True)
self.assertEqual(None, result)
- def test_list_value(self):
+ def test_array_value(self):
from gcloud.datastore._generated import entity_pb2
value_pb = entity_pb2.Value()
meaning = 9
- sub_value_pb1 = value_pb.list_value.add()
- sub_value_pb2 = value_pb.list_value.add()
+ sub_value_pb1 = value_pb.array_value.values.add()
+ sub_value_pb2 = value_pb.array_value.values.add()
sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
sub_value_pb1.string_value = u'hi'
@@ -849,14 +813,14 @@ def test_list_value(self):
result = self._callFUT(value_pb, is_list=True)
self.assertEqual(meaning, result)
- def test_list_value_disagreeing(self):
+ def test_array_value_disagreeing(self):
from gcloud.datastore._generated import entity_pb2
value_pb = entity_pb2.Value()
meaning1 = 9
meaning2 = 10
- sub_value_pb1 = value_pb.list_value.add()
- sub_value_pb2 = value_pb.list_value.add()
+ sub_value_pb1 = value_pb.array_value.values.add()
+ sub_value_pb2 = value_pb.array_value.values.add()
sub_value_pb1.meaning = meaning1
sub_value_pb2.meaning = meaning2
@@ -866,13 +830,13 @@ def test_list_value_disagreeing(self):
with self.assertRaises(ValueError):
self._callFUT(value_pb, is_list=True)
- def test_list_value_partially_unset(self):
+ def test_array_value_partially_unset(self):
from gcloud.datastore._generated import entity_pb2
value_pb = entity_pb2.Value()
meaning1 = 9
- sub_value_pb1 = value_pb.list_value.add()
- sub_value_pb2 = value_pb.list_value.add()
+ sub_value_pb1 = value_pb.array_value.values.add()
+ sub_value_pb2 = value_pb.array_value.values.add()
sub_value_pb1.meaning = meaning1
sub_value_pb1.string_value = u'hi'
@@ -882,33 +846,55 @@ def test_list_value_partially_unset(self):
self._callFUT(value_pb, is_list=True)
-class _Connection(object):
-
- _called_project = _called_key_pbs = _lookup_result = None
-
- def __init__(self, prefix, from_missing=False):
- self.prefix = prefix
- self.from_missing = from_missing
-
- def lookup(self, project, key_pbs):
- from gcloud.datastore._generated import entity_pb2
-
- # Store the arguments called with.
- self._called_project = project
- self._called_key_pbs = key_pbs
-
- key_pb, = key_pbs
-
- response = entity_pb2.Entity()
- response.key.CopyFrom(key_pb)
- response.key.partition_id.dataset_id = self.prefix + project
-
- missing = []
- deferred = []
- if self.from_missing:
- missing[:] = [response]
- self._lookup_result = []
- else:
- self._lookup_result = [response]
-
- return self._lookup_result, missing, deferred
+class TestGeoPoint(unittest2.TestCase):
+
+ def _getTargetClass(self):
+ from gcloud.datastore.helpers import GeoPoint
+ return GeoPoint
+
+ def _makeOne(self, *args, **kwargs):
+ return self._getTargetClass()(*args, **kwargs)
+
+ def test_constructor(self):
+ lat = 81.2
+ lng = 359.9999
+ geo_pt = self._makeOne(lat, lng)
+ self.assertEqual(geo_pt.latitude, lat)
+ self.assertEqual(geo_pt.longitude, lng)
+
+ def test_to_protobuf(self):
+ from google.type import latlng_pb2
+
+ lat = 0.0001
+ lng = 20.03
+ geo_pt = self._makeOne(lat, lng)
+ result = geo_pt.to_protobuf()
+ geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
+ self.assertEqual(result, geo_pt_pb)
+
+ def test___eq__(self):
+ lat = 0.0001
+ lng = 20.03
+ geo_pt1 = self._makeOne(lat, lng)
+ geo_pt2 = self._makeOne(lat, lng)
+ self.assertEqual(geo_pt1, geo_pt2)
+
+ def test___eq__type_differ(self):
+ lat = 0.0001
+ lng = 20.03
+ geo_pt1 = self._makeOne(lat, lng)
+ geo_pt2 = object()
+ self.assertNotEqual(geo_pt1, geo_pt2)
+
+ def test___ne__same_value(self):
+ lat = 0.0001
+ lng = 20.03
+ geo_pt1 = self._makeOne(lat, lng)
+ geo_pt2 = self._makeOne(lat, lng)
+ comparison_val = (geo_pt1 != geo_pt2)
+ self.assertFalse(comparison_val)
+
+ def test___ne__(self):
+ geo_pt1 = self._makeOne(0.0, 1.0)
+ geo_pt2 = self._makeOne(2.0, 3.0)
+ self.assertNotEqual(geo_pt1, geo_pt2)
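The helper tests above reflect the move from ``timestamp_microseconds_value`` (a single integer of microseconds) to ``timestamp_value`` (a protobuf ``Timestamp`` with ``seconds`` and ``nanos``), plus the new ``GeoPoint`` type backed by ``google.type.LatLng``. A small sketch of the microseconds-to-``(seconds, nanos)`` arithmetic exercised here, assuming a timezone-aware UTC datetime; the authoritative conversion lives in ``gcloud/datastore/helpers.py``::

    import calendar
    import datetime

    from gcloud._helpers import UTC


    def datetime_to_timestamp_parts(stamp):
        # Whole seconds since the epoch, plus the sub-second part in nanos.
        seconds = calendar.timegm(stamp.utctimetuple())
        nanos = 1000 * stamp.microsecond
        return seconds, nanos


    utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, UTC)
    assert datetime_to_timestamp_parts(utc) == (
        calendar.timegm(utc.timetuple()), 4375 * 1000)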
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index 3b3d0a120849..ce214e418f4a 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -221,16 +221,6 @@ def test___eq_____ne___same_kind_and_id_different_namespace(self):
self.assertFalse(key1 == key2)
self.assertTrue(key1 != key2)
- def test___eq_____ne___same_kind_and_id_different_project_pfx(self):
- _PROJECT = 'PROJECT'
- _PROJECT_W_PFX = 's~PROJECT'
- _KIND = 'KIND'
- _ID = 1234
- key1 = self._makeOne(_KIND, _ID, project=_PROJECT)
- key2 = self._makeOne(_KIND, _ID, project=_PROJECT_W_PFX)
- self.assertTrue(key1 == key2)
- self.assertFalse(key1 != key2)
-
def test___eq_____ne___same_kind_different_names(self):
_PROJECT = 'PROJECT'
_KIND = 'KIND'
@@ -273,16 +263,6 @@ def test___eq_____ne___same_kind_and_name_different_namespace(self):
self.assertFalse(key1 == key2)
self.assertTrue(key1 != key2)
- def test___eq_____ne___same_kind_and_name_different_project_pfx(self):
- _PROJECT = 'PROJECT'
- _PROJECT_W_PFX = 's~PROJECT'
- _KIND = 'KIND'
- _NAME = 'one'
- key1 = self._makeOne(_KIND, _NAME, project=_PROJECT)
- key2 = self._makeOne(_KIND, _NAME, project=_PROJECT_W_PFX)
- self.assertTrue(key1 == key2)
- self.assertFalse(key1 != key2)
-
def test___hash___incomplete(self):
_PROJECT = 'PROJECT'
_KIND = 'KIND'
@@ -341,12 +321,12 @@ def test_to_protobuf_defaults(self):
self.assertTrue(isinstance(pb, entity_pb2.Key))
# Check partition ID.
- self.assertEqual(pb.partition_id.dataset_id, self._DEFAULT_PROJECT)
+ self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT)
# Unset values are False-y.
- self.assertEqual(pb.partition_id.namespace, '')
+ self.assertEqual(pb.partition_id.namespace_id, '')
# Check the element PB matches the partial key and kind.
- elem, = list(pb.path_element)
+ elem, = list(pb.path)
self.assertEqual(elem.kind, _KIND)
# Unset values are False-y.
self.assertEqual(elem.name, '')
@@ -357,14 +337,14 @@ def test_to_protobuf_w_explicit_project(self):
_PROJECT = 'PROJECT-ALT'
key = self._makeOne('KIND', project=_PROJECT)
pb = key.to_protobuf()
- self.assertEqual(pb.partition_id.dataset_id, _PROJECT)
+ self.assertEqual(pb.partition_id.project_id, _PROJECT)
def test_to_protobuf_w_explicit_namespace(self):
_NAMESPACE = 'NAMESPACE'
key = self._makeOne('KIND', namespace=_NAMESPACE,
project=self._DEFAULT_PROJECT)
pb = key.to_protobuf()
- self.assertEqual(pb.partition_id.namespace, _NAMESPACE)
+ self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE)
def test_to_protobuf_w_explicit_path(self):
_PARENT = 'PARENT'
@@ -374,7 +354,7 @@ def test_to_protobuf_w_explicit_path(self):
key = self._makeOne(_PARENT, _NAME, _CHILD, _ID,
project=self._DEFAULT_PROJECT)
pb = key.to_protobuf()
- elems = list(pb.path_element)
+ elems = list(pb.path)
self.assertEqual(len(elems), 2)
self.assertEqual(elems[0].kind, _PARENT)
self.assertEqual(elems[0].name, _NAME)
@@ -388,7 +368,7 @@ def test_to_protobuf_w_no_kind(self):
key._path[-1].pop('kind')
pb = key.to_protobuf()
# Unset values are False-y.
- self.assertEqual(pb.path_element[0].kind, '')
+ self.assertEqual(pb.path[0].kind, '')
def test_is_partial_no_name_or_id(self):
key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
@@ -449,27 +429,3 @@ def test_parent_multiple_calls(self):
self.assertEqual(parent.path, _PARENT_PATH)
new_parent = key.parent
self.assertTrue(parent is new_parent)
-
-
-class Test__projects_equal(unittest2.TestCase):
-
- def _callFUT(self, project1, project2):
- from gcloud.datastore.key import _projects_equal
- return _projects_equal(project1, project2)
-
- def test_identical_prefixed(self):
- self.assertTrue(self._callFUT('s~foo', 's~foo'))
- self.assertTrue(self._callFUT('e~bar', 'e~bar'))
-
- def test_different_prefixed(self):
- self.assertFalse(self._callFUT('s~foo', 's~bar'))
- self.assertFalse(self._callFUT('s~foo', 'e~foo'))
-
- def test_all_unprefixed(self):
- self.assertTrue(self._callFUT('foo', 'foo'))
- self.assertFalse(self._callFUT('foo', 'bar'))
-
- def test_unprefixed_with_prefixed(self):
- self.assertTrue(self._callFUT('foo', 's~foo'))
- self.assertTrue(self._callFUT('foo', 'e~foo'))
- self.assertFalse(self._callFUT('foo', 's~bar'))
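The deleted key tests above existed only to handle v1beta2 dataset IDs carrying ``s~`` / ``e~`` prefixes; v1beta3 partition IDs use the bare project ID, so prefix-aware equality is gone. The renamed protobuf fields checked throughout this file, shown directly (``Key.to_protobuf()`` in ``gcloud/datastore/key.py`` performs the real conversion)::

    from gcloud.datastore._generated import entity_pb2

    key_pb = entity_pb2.Key()
    key_pb.partition_id.project_id = 'PROJECT'      # was partition_id.dataset_id
    key_pb.partition_id.namespace_id = 'NAMESPACE'  # was partition_id.namespace
    elem = key_pb.path.add()                        # was key_pb.path_element.add()
    elem.kind = 'KIND'
    elem.id = 1234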
diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py
index a16140fefd12..4108de7acd2e 100644
--- a/gcloud/datastore/test_query.py
+++ b/gcloud/datastore/test_query.py
@@ -42,7 +42,7 @@ def test_ctor_defaults(self):
self.assertEqual(query.filters, [])
self.assertEqual(query.projection, [])
self.assertEqual(query.order, [])
- self.assertEqual(query.group_by, [])
+ self.assertEqual(query.distinct_on, [])
def test_ctor_explicit(self):
from gcloud.datastore.key import Key
@@ -54,7 +54,7 @@ def test_ctor_explicit(self):
FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
PROJECTION = ['foo', 'bar', 'baz']
ORDER = ['foo', 'bar']
- GROUP_BY = ['foo']
+ DISTINCT_ON = ['foo']
query = self._makeOne(
client,
kind=_KIND,
@@ -64,7 +64,7 @@ def test_ctor_explicit(self):
filters=FILTERS,
projection=PROJECTION,
order=ORDER,
- group_by=GROUP_BY,
+ distinct_on=DISTINCT_ON,
)
self.assertTrue(query._client is client)
self.assertEqual(query.project, _PROJECT)
@@ -74,7 +74,7 @@ def test_ctor_explicit(self):
self.assertEqual(query.filters, FILTERS)
self.assertEqual(query.projection, PROJECTION)
self.assertEqual(query.order, ORDER)
- self.assertEqual(query.group_by, GROUP_BY)
+ self.assertEqual(query.distinct_on, DISTINCT_ON)
def test_ctor_bad_projection(self):
BAD_PROJECTION = object()
@@ -86,10 +86,10 @@ def test_ctor_bad_order(self):
self.assertRaises(TypeError, self._makeOne, self._makeClient(),
order=BAD_ORDER)
- def test_ctor_bad_group_by(self):
- BAD_GROUP_BY = object()
+ def test_ctor_bad_distinct_on(self):
+ BAD_DISTINCT_ON = object()
self.assertRaises(TypeError, self._makeOne, self._makeClient(),
- group_by=BAD_GROUP_BY)
+ distinct_on=BAD_DISTINCT_ON)
def test_ctor_bad_filters(self):
FILTERS_CANT_UNPACK = [('one', 'two')]
@@ -284,29 +284,29 @@ def test_order_setter_multiple(self):
query.order = ['foo', '-bar']
self.assertEqual(query.order, ['foo', '-bar'])
- def test_group_by_setter_empty(self):
- query = self._makeOne(self._makeClient(), group_by=['foo', 'bar'])
- query.group_by = []
- self.assertEqual(query.group_by, [])
+ def test_distinct_on_setter_empty(self):
+ query = self._makeOne(self._makeClient(), distinct_on=['foo', 'bar'])
+ query.distinct_on = []
+ self.assertEqual(query.distinct_on, [])
- def test_group_by_setter_string(self):
+ def test_distinct_on_setter_string(self):
query = self._makeOne(self._makeClient())
- query.group_by = 'field1'
- self.assertEqual(query.group_by, ['field1'])
+ query.distinct_on = 'field1'
+ self.assertEqual(query.distinct_on, ['field1'])
- def test_group_by_setter_non_empty(self):
+ def test_distinct_on_setter_non_empty(self):
query = self._makeOne(self._makeClient())
- query.group_by = ['field1', 'field2']
- self.assertEqual(query.group_by, ['field1', 'field2'])
+ query.distinct_on = ['field1', 'field2']
+ self.assertEqual(query.distinct_on, ['field1', 'field2'])
- def test_group_by_multiple_calls(self):
- _GROUP_BY1 = ['field1', 'field2']
- _GROUP_BY2 = ['field3']
+ def test_distinct_on_multiple_calls(self):
+ _DISTINCT_ON1 = ['field1', 'field2']
+ _DISTINCT_ON2 = ['field3']
query = self._makeOne(self._makeClient())
- query.group_by = _GROUP_BY1
- self.assertEqual(query.group_by, _GROUP_BY1)
- query.group_by = _GROUP_BY2
- self.assertEqual(query.group_by, _GROUP_BY2)
+ query.distinct_on = _DISTINCT_ON1
+ self.assertEqual(query.distinct_on, _DISTINCT_ON1)
+ query.distinct_on = _DISTINCT_ON2
+ self.assertEqual(query.distinct_on, _DISTINCT_ON2)
def test_fetch_defaults_w_client_attr(self):
connection = _Connection()
@@ -354,8 +354,8 @@ def _addQueryResults(self, connection, cursor=_END, more=False):
NO_MORE = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
_ID = 123
entity_pb = entity_pb2.Entity()
- entity_pb.key.partition_id.dataset_id = self._PROJECT
- path_element = entity_pb.key.path_element.add()
+ entity_pb.key.partition_id.project_id = self._PROJECT
+ path_element = entity_pb.key.path.add()
path_element.kind = self._KIND
path_element.id = _ID
value_pb = _new_value_pb(entity_pb, 'foo')
@@ -427,7 +427,7 @@ def test_next_page_no_cursors_no_more_w_offset_and_limit(self):
[{'kind': self._KIND, 'id': self._ID}])
self.assertEqual(entities[0]['foo'], u'Foo')
qpb = _pb_from_query(query)
- qpb.limit = 13
+ qpb.limit.value = 13
qpb.offset = 29
EXPECTED = {
'project': self._PROJECT,
@@ -557,14 +557,15 @@ def test_empty(self):
self.assertEqual(list(pb.projection), [])
self.assertEqual(list(pb.kind), [])
self.assertEqual(list(pb.order), [])
- self.assertEqual(list(pb.group_by), [])
+ self.assertEqual(list(pb.distinct_on), [])
self.assertEqual(pb.filter.property_filter.property.name, '')
cfilter = pb.filter.composite_filter
- self.assertEqual(cfilter.operator, query_pb2.CompositeFilter.AND)
- self.assertEqual(list(cfilter.filter), [])
+ self.assertEqual(cfilter.op,
+ query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED)
+ self.assertEqual(list(cfilter.filters), [])
self.assertEqual(pb.start_cursor, b'')
self.assertEqual(pb.end_cursor, b'')
- self.assertEqual(pb.limit, 0)
+ self.assertEqual(pb.limit.value, 0)
self.assertEqual(pb.offset, 0)
def test_projection(self):
@@ -578,17 +579,16 @@ def test_kind(self):
def test_ancestor(self):
from gcloud.datastore.key import Key
- from gcloud.datastore.helpers import _prepare_key_for_request
from gcloud.datastore._generated import query_pb2
ancestor = Key('Ancestor', 123, project='PROJECT')
pb = self._callFUT(_Query(ancestor=ancestor))
cfilter = pb.filter.composite_filter
- self.assertEqual(cfilter.operator, query_pb2.CompositeFilter.AND)
- self.assertEqual(len(cfilter.filter), 1)
- pfilter = cfilter.filter[0].property_filter
+ self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+ self.assertEqual(len(cfilter.filters), 1)
+ pfilter = cfilter.filters[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
- ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf())
+ ancestor_pb = ancestor.to_protobuf()
self.assertEqual(pfilter.value.key_value, ancestor_pb)
def test_filter(self):
@@ -600,15 +600,14 @@ def test_filter(self):
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
- self.assertEqual(cfilter.operator, query_pb2.CompositeFilter.AND)
- self.assertEqual(len(cfilter.filter), 1)
- pfilter = cfilter.filter[0].property_filter
+ self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+ self.assertEqual(len(cfilter.filters), 1)
+ pfilter = cfilter.filters[0].property_filter
self.assertEqual(pfilter.property.name, 'name')
self.assertEqual(pfilter.value.string_value, u'John')
def test_filter_key(self):
from gcloud.datastore.key import Key
- from gcloud.datastore.helpers import _prepare_key_for_request
from gcloud.datastore._generated import query_pb2
key = Key('Kind', 123, project='PROJECT')
@@ -618,11 +617,11 @@ def test_filter_key(self):
}
pb = self._callFUT(query)
cfilter = pb.filter.composite_filter
- self.assertEqual(cfilter.operator, query_pb2.CompositeFilter.AND)
- self.assertEqual(len(cfilter.filter), 1)
- pfilter = cfilter.filter[0].property_filter
+ self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
+ self.assertEqual(len(cfilter.filters), 1)
+ pfilter = cfilter.filters[0].property_filter
self.assertEqual(pfilter.property.name, '__key__')
- key_pb = _prepare_key_for_request(key.to_protobuf())
+ key_pb = key.to_protobuf()
self.assertEqual(pfilter.value.key_value, key_pb)
def test_order(self):
@@ -636,9 +635,9 @@ def test_order(self):
query_pb2.PropertyOrder.DESCENDING,
query_pb2.PropertyOrder.ASCENDING])
- def test_group_by(self):
- pb = self._callFUT(_Query(group_by=['a', 'b', 'c']))
- self.assertEqual([item.name for item in pb.group_by],
+ def test_distinct_on(self):
+ pb = self._callFUT(_Query(distinct_on=['a', 'b', 'c']))
+ self.assertEqual([item.name for item in pb.distinct_on],
['a', 'b', 'c'])
@@ -653,7 +652,7 @@ def __init__(self,
filters=(),
projection=(),
order=(),
- group_by=()):
+ distinct_on=()):
self._client = client
self.kind = kind
self.project = project
@@ -662,7 +661,7 @@ def __init__(self,
self.filters = filters
self.projection = projection
self.order = order
- self.group_by = group_by
+ self.distinct_on = distinct_on
class _Connection(object):
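The query tests above track three protobuf renames: ``group_by`` becomes ``distinct_on``, ``CompositeFilter.operator`` / ``filter`` become ``op`` / ``filters`` (with ``OPERATOR_UNSPECIFIED`` as the unset value), and ``limit`` becomes a wrapped ``Int32Value`` addressed as ``limit.value``. A sketch of the v1beta3 query shape the assertions expect; ``_pb_from_query()`` in ``gcloud/datastore/query.py`` builds the real request::

    from gcloud.datastore._generated import query_pb2

    query_pb = query_pb2.Query()
    query_pb.limit.value = 13                # ``limit`` is now a wrapped Int32Value
    query_pb.offset = 29
    query_pb.distinct_on.add().name = 'foo'  # was ``group_by``

    cfilter = query_pb.filter.composite_filter
    cfilter.op = query_pb2.CompositeFilter.AND       # was ``operator``
    pfilter = cfilter.filters.add().property_filter  # was ``filter``
    pfilter.property.name = '__key__'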
diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py
index 8260e1916f70..5f780f63388a 100644
--- a/gcloud/datastore/test_transaction.py
+++ b/gcloud/datastore/test_transaction.py
@@ -35,7 +35,9 @@ def test_ctor_defaults(self):
self.assertEqual(xact.connection, connection)
self.assertEqual(xact.id, None)
self.assertEqual(xact._status, self._getTargetClass()._INITIAL)
- self.assertTrue(isinstance(xact.mutations, datastore_pb2.Mutation))
+ self.assertTrue(isinstance(xact._commit_request,
+ datastore_pb2.CommitRequest))
+ self.assertTrue(xact.mutations is xact._commit_request.mutations)
self.assertEqual(len(xact._partial_key_entities), 0)
def test_current(self):
@@ -166,8 +168,8 @@ def _make_key(kind, id_, project):
from gcloud.datastore._generated import entity_pb2
key = entity_pb2.Key()
- key.partition_id.dataset_id = project
- elem = key.path_element.add()
+ key.partition_id.project_id = project
+ elem = key.path.add()
elem.kind = kind
elem.id = id_
return key
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index 1534dcbda29c..dc78c7ba99f9 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -24,7 +24,7 @@ class Transaction(Batch):
or none succeed (transactionally).
For example, the following snippet of code will put the two ``save``
- operations (either ``insert_auto_id`` or ``upsert``) into the same
+ operations (either ``insert`` or ``upsert``) into the same
mutation, and execute those within a transaction::
>>> from gcloud import datastore
diff --git a/gcloud/environment_vars.py b/gcloud/environment_vars.py
index 7a4b6aaff28a..344ffd3c8785 100644
--- a/gcloud/environment_vars.py
+++ b/gcloud/environment_vars.py
@@ -24,9 +24,6 @@
TESTS_PROJECT = 'GCLOUD_TESTS_PROJECT_ID'
"""Environment variable defining project for tests."""
-DATASET = 'GCLOUD_DATASET_ID'
-"""Environment variable defining default dataset ID."""
-
GCD_DATASET = 'DATASTORE_DATASET'
"""Environment variable defining default dataset ID under GCD."""
@@ -36,8 +33,5 @@
PUBSUB_EMULATOR = 'PUBSUB_EMULATOR_HOST'
"""Environment variable defining host for Pub/Sub emulator."""
-TESTS_DATASET = 'GCLOUD_TESTS_DATASET_ID'
-"""Environment variable defining dataset ID for tests."""
-
CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS'
"""Environment variable defining location of Google credentials."""
diff --git a/scripts/make_datastore_grpc.py b/scripts/make_datastore_grpc.py
new file mode 100644
index 000000000000..1de717c4a08c
--- /dev/null
+++ b/scripts/make_datastore_grpc.py
@@ -0,0 +1,120 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Get the inserted gRPC lines for datastore pb2 file."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+ROOT_DIR = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..'))
+PROTOS_DIR = os.path.join(ROOT_DIR, 'googleapis-pb')
+PROTO_PATH = os.path.join(PROTOS_DIR, 'google', 'datastore',
+ 'v1beta3', 'datastore.proto')
+GRPC_ONLY_FILE = os.path.join(ROOT_DIR, 'gcloud', 'datastore',
+ '_generated', 'datastore_grpc_pb2.py')
+PROTOC_CMD = 'protoc'
+GRPC_PLUGIN = 'grpc_python_plugin'
+
+
+def get_pb2_contents_with_grpc():
+ """Get pb2 lines generated by protoc with gRPC plugin.
+
+ :rtype: list
+ :returns: A list of lines in the generated file.
+ """
+ temp_dir = tempfile.mkdtemp()
+ generated_path = os.path.join(temp_dir, 'google', 'datastore',
+ 'v1beta3', 'datastore_pb2.py')
+ try:
+ return_code = subprocess.call([
+ PROTOC_CMD,
+ '--proto_path',
+ PROTOS_DIR,
+ '--python_out',
+ temp_dir,
+ '--plugin',
+ 'protoc-gen-grpc=' + GRPC_PLUGIN,
+ '--grpc_out',
+ temp_dir,
+ PROTO_PATH,
+ ])
+ if return_code != 0:
+ sys.exit(return_code)
+ with open(generated_path, 'rb') as file_obj:
+ return file_obj.readlines()
+ finally:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+
+
+def get_pb2_contents_without_grpc():
+ """Get pb2 lines generated by protoc without gRPC plugin.
+
+ :rtype: list
+ :returns: A list of lines in the generated file.
+ """
+ temp_dir = tempfile.mkdtemp()
+ generated_path = os.path.join(temp_dir, 'google', 'datastore',
+ 'v1beta3', 'datastore_pb2.py')
+ try:
+ return_code = subprocess.call([
+ PROTOC_CMD,
+ '--proto_path',
+ PROTOS_DIR,
+ '--python_out',
+ temp_dir,
+ PROTO_PATH,
+ ])
+ if return_code != 0:
+ sys.exit(return_code)
+ with open(generated_path, 'rb') as file_obj:
+ return file_obj.readlines()
+ finally:
+ shutil.rmtree(temp_dir, ignore_errors=True)
+
+
+def get_pb2_grpc_only():
+ """Get pb2 lines that are only in gRPC.
+
+ :rtype: list
+ :returns: A list of lines that are only in the pb2 file
+ generated with the gRPC plugin.
+ """
+ grpc_contents = get_pb2_contents_with_grpc()
+ non_grpc_contents = get_pb2_contents_without_grpc()
+
+ grpc_only_lines = []
+ curr_non_grpc_line = 0
+ for line in grpc_contents:
+ if line == non_grpc_contents[curr_non_grpc_line]:
+ curr_non_grpc_line += 1
+ else:
+ grpc_only_lines.append(line)
+
+ return grpc_only_lines
+
+
+def main():
+ """Write gRPC-only lines to custom module."""
+ grpc_only_lines = get_pb2_grpc_only()
+ with open(GRPC_ONLY_FILE, 'wb') as file_obj:
+ file_obj.write(''.join(grpc_only_lines))
+
+
+if __name__ == '__main__':
+ main()
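``scripts/make_datastore_grpc.py`` generates the datastore ``*_pb2.py`` module twice, once with and once without the gRPC plugin, and writes only the lines unique to the gRPC run into ``datastore_grpc_pb2.py``. A toy illustration of the single-cursor line walk used by ``get_pb2_grpc_only()``, with an added bounds check for clarity::

    grpc_contents = ['import abc\n', 'import grpc\n', 'class Stub(object):\n']
    non_grpc_contents = ['import abc\n']

    grpc_only_lines = []
    cursor = 0
    for line in grpc_contents:
        if (cursor < len(non_grpc_contents) and
                line == non_grpc_contents[cursor]):
            cursor += 1
        else:
            grpc_only_lines.append(line)

    assert grpc_only_lines == ['import grpc\n', 'class Stub(object):\n']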
diff --git a/scripts/make_operations_grpc.py b/scripts/make_operations_grpc.py
index 9bc85803313b..65b877250594 100644
--- a/scripts/make_operations_grpc.py
+++ b/scripts/make_operations_grpc.py
@@ -17,6 +17,7 @@
import os
import shutil
import subprocess
+import sys
import tempfile
@@ -42,7 +43,7 @@ def get_pb2_contents_with_grpc():
generated_path = os.path.join(temp_dir, 'google', 'longrunning',
'operations_pb2.py')
try:
- subprocess.check_output([
+ return_code = subprocess.call([
PROTOC_CMD,
'--proto_path',
PROTOS_DIR,
@@ -54,6 +55,8 @@ def get_pb2_contents_with_grpc():
temp_dir,
PROTO_PATH,
])
+ if return_code != 0:
+ sys.exit(return_code)
with open(generated_path, 'rb') as file_obj:
return file_obj.readlines()
finally:
@@ -70,7 +73,7 @@ def get_pb2_contents_without_grpc():
generated_path = os.path.join(temp_dir, 'google', 'longrunning',
'operations_pb2.py')
try:
- subprocess.check_output([
+ return_code = subprocess.call([
PROTOC_CMD,
'--proto_path',
PROTOS_DIR,
@@ -78,6 +81,8 @@ def get_pb2_contents_without_grpc():
temp_dir,
PROTO_PATH,
])
+ if return_code != 0:
+ sys.exit(return_code)
with open(generated_path, 'rb') as file_obj:
return file_obj.readlines()
finally:
diff --git a/scripts/pylintrc_default b/scripts/pylintrc_default
index a06fd338f61d..1f254578f5d7 100644
--- a/scripts/pylintrc_default
+++ b/scripts/pylintrc_default
@@ -27,10 +27,8 @@
# DEFAULT: ignore=CVS
# NOTE: This path must be relative due to the use of
# os.walk in astroid.modutils.get_module_files.
-# RATIONALE:
-# _datastore_v1_pb2.py: protobuf-generated code.
-ignore =
- _datastore_v1_pb2.py
+# RATIONALE: No files to ignore.
+ignore=
# Pickle collected data for later comparisons.
# DEFAULT: persistent=yes
@@ -76,10 +74,6 @@ load-plugins=pylint.extensions.check_docs
# identical implementation but different docstrings.
# - star-args: standard Python idioms for varargs:
# ancestor = Query().filter(*order_props)
-# - method-hidden: Decorating a method in a class (e.g. in _DefaultsContainer)
-# @_lazy_property_deco
-# def dataset_id():
-# ...
# - redefined-variable-type: This error is overzealous and complains at e.g.
# if some_prop:
# return int(value)
@@ -102,7 +96,6 @@ disable =
redefined-builtin,
similarities,
star-args,
- method-hidden,
redefined-variable-type,
wrong-import-position,
diff --git a/scripts/rewrite_imports.py b/scripts/rewrite_imports.py
index 4af5ec9a89fd..7429ec14734c 100644
--- a/scripts/rewrite_imports.py
+++ b/scripts/rewrite_imports.py
@@ -27,6 +27,7 @@
'google.bigtable.admin.cluster.v1': 'gcloud.bigtable._generated',
'google.bigtable.admin.table.v1': 'gcloud.bigtable._generated',
'google.bigtable.v1': 'gcloud.bigtable._generated',
+ 'google.datastore.v1beta3': 'gcloud.datastore._generated',
}
@@ -134,7 +135,8 @@ def rewrite_file(filename):
def main():
"""Rewrites all PB2 files."""
- pb2_files = glob.glob('gcloud/bigtable/_generated/*pb2.py')
+ pb2_files = (glob.glob('gcloud/bigtable/_generated/*pb2.py') +
+ glob.glob('gcloud/datastore/_generated/*pb2.py'))
for filename in pb2_files:
rewrite_file(filename)
diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py
index 7bc418b5cdac..fe5c772d4c5d 100644
--- a/scripts/run_pylint.py
+++ b/scripts/run_pylint.py
@@ -31,12 +31,11 @@
IGNORED_DIRECTORIES = [
- 'gcloud/bigtable/_generated',
- 'gcloud/datastore/_generated',
+ os.path.join('gcloud', 'bigtable', '_generated'),
+ os.path.join('gcloud', 'datastore', '_generated'),
]
IGNORED_FILES = [
- 'gcloud/datastore/_datastore_v1_pb2.py',
- 'docs/conf.py',
+ os.path.join('docs', 'conf.py'),
'setup.py',
]
SCRIPTS_DIR = os.path.abspath(os.path.dirname(__file__))
diff --git a/system_tests/clear_datastore.py b/system_tests/clear_datastore.py
index f544dd484cbb..96d214bc464d 100644
--- a/system_tests/clear_datastore.py
+++ b/system_tests/clear_datastore.py
@@ -21,7 +21,7 @@
from six.moves import input
from gcloud import datastore
-from gcloud.environment_vars import TESTS_DATASET
+from gcloud.environment_vars import TESTS_PROJECT
FETCH_MAX = 20
@@ -90,7 +90,7 @@ def remove_kind(kind, client):
def remove_all_entities(client=None):
if client is None:
# Get a client that uses the test dataset.
- client = datastore.Client(project=os.getenv(TESTS_DATASET))
+ client = datastore.Client(project=os.getenv(TESTS_PROJECT))
for kind in ALL_KINDS:
remove_kind(kind, client)
diff --git a/system_tests/datastore.py b/system_tests/datastore.py
index dbef49e7d857..521c727a4c93 100644
--- a/system_tests/datastore.py
+++ b/system_tests/datastore.py
@@ -19,11 +19,12 @@
import httplib2
import unittest2
+from gcloud import _helpers
from gcloud._helpers import UTC
from gcloud import datastore
-from gcloud.datastore import client as client_mod
+from gcloud.datastore.helpers import GeoPoint
from gcloud.environment_vars import GCD_DATASET
-from gcloud.environment_vars import TESTS_DATASET
+from gcloud.environment_vars import TESTS_PROJECT
from gcloud.exceptions import Conflict
# This assumes the command is being run via tox hence the
# repository root is the current directory.
@@ -55,7 +56,7 @@ def setUpModule():
# Isolated namespace so concurrent test runs don't collide.
test_namespace = 'ns%d' % (1000 * time.time(),)
if emulator_dataset is None:
- client_mod.DATASET = TESTS_DATASET
+ _helpers.PROJECT = TESTS_PROJECT
Config.CLIENT = datastore.Client(namespace=test_namespace)
else:
credentials = EmulatorCreds()
@@ -178,6 +179,32 @@ def test_empty_kind(self):
posts = list(query.fetch(limit=2))
self.assertEqual(posts, [])
+ def test_all_value_types(self):
+ key = Config.CLIENT.key('TestPanObject', 1234)
+ entity = datastore.Entity(key=key)
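+        # Populate one property per supported value type.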
+ entity['timestamp'] = datetime.datetime(2014, 9, 9, tzinfo=UTC)
+ key_stored = Config.CLIENT.key('SavedKey', 'right-here')
+ entity['key'] = key_stored
+ entity['truthy'] = True
+ entity['float'] = 2.718281828
+ entity['int'] = 3735928559
+ entity['words'] = u'foo'
+ entity['blob'] = b'seekretz'
+ entity_stored = datastore.Entity(key=key_stored)
+ entity_stored['hi'] = 'bye'
+ entity['nested'] = entity_stored
+ entity['items'] = [1, 2, 3]
+ entity['geo'] = GeoPoint(1.0, 2.0)
+ entity['nothing_here'] = None
+
+ # Store the entity.
+ self.case_entities_to_delete.append(entity)
+ Config.CLIENT.put(entity)
+
+        # Check the original and retrieved entities are the same.
+ retrieved_entity = Config.CLIENT.get(entity.key)
+ self.assertEqual(retrieved_entity, entity)
+
class TestDatastoreSaveKeys(TestDatastore):
@@ -376,9 +403,9 @@ def test_query_paginate_with_start_cursor(self):
self.assertEqual(new_entities[0]['name'], 'Sansa')
self.assertEqual(new_entities[2]['name'], 'Arya')
- def test_query_group_by(self):
+ def test_query_distinct_on(self):
query = self._base_query()
- query.group_by = ['alive']
+ query.distinct_on = ['alive']
expected_matches = 2
# We expect 2, but allow the query to get 1 extra.
diff --git a/system_tests/local_test_setup.sample b/system_tests/local_test_setup.sample
index 4219c962570b..771638c56985 100644
--- a/system_tests/local_test_setup.sample
+++ b/system_tests/local_test_setup.sample
@@ -1,5 +1,4 @@
export GOOGLE_APPLICATION_CREDENTIALS="app_credentials.json.sample"
export GCLOUD_TESTS_PROJECT_ID="my-project"
-export GCLOUD_TESTS_DATASET_ID=${GCLOUD_TESTS_PROJECT_ID}
export GCLOUD_REMOTE_FOR_LINT="upstream"
export GCLOUD_BRANCH_FOR_LINT="master"
diff --git a/system_tests/populate_datastore.py b/system_tests/populate_datastore.py
index e4be6cfbadad..041471bd0a0c 100644
--- a/system_tests/populate_datastore.py
+++ b/system_tests/populate_datastore.py
@@ -22,7 +22,7 @@
from six.moves import zip
from gcloud import datastore
-from gcloud.environment_vars import TESTS_DATASET
+from gcloud.environment_vars import TESTS_PROJECT
ANCESTOR = ('Book', 'GoT')
@@ -91,7 +91,7 @@ def print_func(message):
def add_characters(client=None):
if client is None:
# Get a client that uses the test dataset.
- client = datastore.Client(project=os.getenv(TESTS_DATASET))
+ client = datastore.Client(project=os.getenv(TESTS_PROJECT))
with client.transaction() as xact:
for key_path, character in zip(KEY_PATHS, CHARACTERS):
if key_path[-1] != character['name']:
diff --git a/system_tests/run_system_test.py b/system_tests/run_system_test.py
index 553b58d29a95..c880a81bfe2a 100644
--- a/system_tests/run_system_test.py
+++ b/system_tests/run_system_test.py
@@ -28,15 +28,6 @@
import system_test_utils
-REQUIREMENTS = {
- 'datastore': ['dataset_id', 'credentials'],
- 'storage': ['project', 'credentials'],
- 'pubsub': ['project', 'credentials'],
- 'bigquery': ['project', 'credentials'],
- 'bigtable': ['project', 'credentials'],
- 'bigtable-happybase': ['project', 'credentials'],
- 'logging': ['project', 'credentials'],
-}
TEST_MODULES = {
'datastore': datastore,
'storage': storage,
@@ -52,7 +43,7 @@ def get_parser():
parser = argparse.ArgumentParser(
description='GCloud test runner against actual project.')
parser.add_argument('--package', dest='package',
- choices=REQUIREMENTS.keys(),
+ choices=TEST_MODULES.keys(),
default='datastore', help='Package to be tested.')
parser.add_argument(
'--ignore-requirements',
@@ -64,8 +55,7 @@ def get_parser():
def run_module_tests(module_name, ignore_requirements=False):
if not ignore_requirements:
# Make sure environ is set before running test.
- requirements = REQUIREMENTS[module_name]
- system_test_utils.check_environ(*requirements)
+ system_test_utils.check_environ()
suite = unittest2.TestSuite()
test_mod = TEST_MODULES[module_name]
diff --git a/system_tests/system_test_utils.py b/system_tests/system_test_utils.py
index eb7b3b5517bb..d936bd05cd74 100644
--- a/system_tests/system_test_utils.py
+++ b/system_tests/system_test_utils.py
@@ -17,13 +17,11 @@
import sys
from gcloud.environment_vars import CREDENTIALS as TEST_CREDENTIALS
-from gcloud.environment_vars import TESTS_DATASET
from gcloud.environment_vars import TESTS_PROJECT
# From shell environ. May be None.
PROJECT_ID = os.getenv(TESTS_PROJECT)
-DATASET_ID = os.getenv(TESTS_DATASET)
CREDENTIALS = os.getenv(TEST_CREDENTIALS)
ENVIRON_ERROR_MSG = """\
@@ -46,21 +44,14 @@ def create_scoped_required():
return False
-def check_environ(*requirements):
-
+def check_environ():
missing = []
- if 'dataset_id' in requirements:
- if DATASET_ID is None:
- missing.append(TESTS_DATASET)
-
- if 'project' in requirements:
- if PROJECT_ID is None:
- missing.append(TESTS_PROJECT)
+ if PROJECT_ID is None:
+ missing.append(TESTS_PROJECT)
- if 'credentials' in requirements:
- if CREDENTIALS is None or not os.path.isfile(CREDENTIALS):
- missing.append(TEST_CREDENTIALS)
+ if CREDENTIALS is None or not os.path.isfile(CREDENTIALS):
+ missing.append(TEST_CREDENTIALS)
if missing:
print(ENVIRON_ERROR_MSG % ', '.join(missing), file=sys.stderr)
diff --git a/tox.ini b/tox.ini
index d9c390dbd633..4f404c74d272 100644
--- a/tox.ini
+++ b/tox.ini
@@ -77,7 +77,7 @@ deps = {[testenv:docs]deps}
passenv = {[testenv:docs]passenv}
[pep8]
-exclude = gcloud/datastore/_generated/*,gcloud/datastore/_datastore_v1_pb2.py,gcloud/bigtable/_generated/*,docs/conf.py,
+exclude = docs/conf.py,gcloud/bigtable/_generated/*,gcloud/datastore/_generated/*
verbose = 1
[testenv:lint]