Commit

Merge pull request #1688 from dhermes/add-ds-v1beta3
Merging in datastore-v1beta3 branch.
dhermes committed Apr 1, 2016
2 parents 659d279 + 8538095 commit 1c9c12c
Showing 40 changed files with 4,129 additions and 3,649 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -53,3 +53,4 @@ scripts/pylintrc_reduced
# Directories used for creating generated PB2 files
generated_python/
cloud-bigtable-client/
googleapis-pb/
4 changes: 1 addition & 3 deletions CONTRIBUTING.rst
@@ -162,8 +162,6 @@ Running System Tests

- ``GCLOUD_TESTS_PROJECT_ID``: Developers Console project ID (e.g.
bamboo-shift-455).
- ``GCLOUD_TESTS_DATASET_ID``: The name of the dataset your tests connect to.
This is typically the same as ``GCLOUD_TESTS_PROJECT_ID``.
- ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
see ``system_tests/app_credentials.json.sample`` as an example. Such a file
can be downloaded directly from the developer's console by clicking
@@ -195,7 +193,7 @@ Running System Tests

# Create the indexes
$ gcloud preview datastore create-indexes system_tests/data/index.yaml \
> --project=$GCLOUD_TESTS_DATASET_ID
> --project=$GCLOUD_TESTS_PROJECT_ID

# Restore your environment to its previous state.
$ unset CLOUDSDK_PYTHON_SITEPACKAGES
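
As a companion to the variable list above, here is a minimal, purely illustrative Python sketch of how a system test could read this configuration. Only the two environment variable names come from the contributing guide; the helper name and error handling are assumptions.

import os

def load_system_test_config():
    # Only the two environment variable names below come from the
    # contributing guide; this helper itself is illustrative.
    project_id = os.environ['GCLOUD_TESTS_PROJECT_ID']
    credentials_path = os.environ['GOOGLE_APPLICATION_CREDENTIALS']
    if not os.path.isfile(credentials_path):
        raise EnvironmentError(
            'GOOGLE_APPLICATION_CREDENTIALS must point to a JSON key file.')
    return project_id, credentials_path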
67 changes: 44 additions & 23 deletions Makefile
@@ -1,8 +1,10 @@
GENERATED_DIR=$(shell pwd)/generated_python
FINAL_DIR=$(shell pwd)/gcloud/bigtable/_generated
BIGTABLE_DIR=$(shell pwd)/gcloud/bigtable/_generated
DATASTORE_DIR=$(shell pwd)/gcloud/datastore/_generated
GRPC_PLUGIN=grpc_python_plugin
PROTOC_CMD=protoc
PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
BIGTABLE_PROTOS_DIR=$(shell pwd)/cloud-bigtable-client/bigtable-protos/src/main/proto
GOOGLEAPIS_PROTOS_DIR=$(shell pwd)/googleapis-pb

help:
@echo 'Makefile for gcloud-python Bigtable protos '
@@ -12,42 +14,58 @@ help:
@echo ' make clean Clean generated files '

generate:
[ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client
# Retrieve git repos that have our *.proto files.
[ -d cloud-bigtable-client ] || git clone https://github.com/GoogleCloudPlatform/cloud-bigtable-client --depth=1
cd cloud-bigtable-client && git pull origin master
[ -d googleapis-pb ] || git clone https://github.com/google/googleapis googleapis-pb --depth=1
cd googleapis-pb && git pull origin master
# Make the directory where our *_pb2.py files will go.
mkdir -p $(GENERATED_DIR)
# Generate all *_pb2.py files that require gRPC.
$(PROTOC_CMD) \
--proto_path=$(PROTOS_DIR) \
--proto_path=$(BIGTABLE_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
--plugin=protoc-gen-grpc=$(GRPC_PLUGIN) \
--grpc_out=$(GENERATED_DIR) \
$(PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
$(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
$(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
$(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service.proto
# Generate all *_pb2.py files that do not require gRPC.
$(PROTOC_CMD) \
--proto_path=$(PROTOS_DIR) \
--proto_path=$(BIGTABLE_PROTOS_DIR) \
--proto_path=$(GOOGLEAPIS_PROTOS_DIR) \
--python_out=$(GENERATED_DIR) \
$(PROTOS_DIR)/google/bigtable/v1/bigtable_data.proto \
$(PROTOS_DIR)/google/bigtable/v1/bigtable_service_messages.proto \
$(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto \
$(PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
$(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
$(PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto
$(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_data.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/bigtable_service_messages.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_data.proto \
$(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/bigtable_table_service_messages.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/datastore.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/entity.proto \
$(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/query.proto
# Move the newly generated *_pb2.py files into our library.
mv $(GENERATED_DIR)/google/bigtable/v1/* $(FINAL_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(FINAL_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(FINAL_DIR)
mv $(GENERATED_DIR)/google/bigtable/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/cluster/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/bigtable/admin/table/v1/* $(BIGTABLE_DIR)
mv $(GENERATED_DIR)/google/datastore/v1beta3/* $(DATASTORE_DIR)
# Remove all existing *.proto files before we replace
rm -f $(FINAL_DIR)/*.proto
rm -f $(BIGTABLE_DIR)/*.proto
rm -f $(DATASTORE_DIR)/*.proto
# Copy over the *.proto files into our library.
cp $(PROTOS_DIR)/google/bigtable/v1/*.proto $(FINAL_DIR)
cp $(PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(FINAL_DIR)
cp $(PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(FINAL_DIR)
cp $(PROTOS_DIR)/google/longrunning/operations.proto $(FINAL_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/cluster/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/bigtable/admin/table/v1/*.proto $(BIGTABLE_DIR)
cp $(BIGTABLE_PROTOS_DIR)/google/longrunning/operations.proto $(BIGTABLE_DIR)
cp $(GOOGLEAPIS_PROTOS_DIR)/google/datastore/v1beta3/*.proto $(DATASTORE_DIR)
# Rename all *.proto files in our library with an
# underscore and remove executable bit.
cd $(FINAL_DIR) && \
cd $(BIGTABLE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
done
cd $(DATASTORE_DIR) && \
for filename in *.proto; do \
chmod -x $$filename ; \
mv $$filename _$$filename ; \
@@ -56,6 +74,9 @@ generate:
# non-gRPC parts so that the protos from `googleapis-common-protos`
# can be used without gRPC.
python scripts/make_operations_grpc.py
# Separate the gRPC parts of the datastore service from the
# non-gRPC parts so that the protos can be used without gRPC.
python scripts/make_datastore_grpc.py
# Rewrite the imports in the generated *_pb2.py files.
python scripts/rewrite_imports.py
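
The last two steps above hand off to helper scripts whose contents are not part of this diff. As a rough, assumption-laden sketch of the transformation the final comment describes (rewriting imports in the generated *_pb2.py files so they resolve inside gcloud/datastore/_generated instead of the original google.datastore.v1beta3 package), something along these lines would do it; the real logic lives in scripts/rewrite_imports.py and may differ.

import re

# Illustrative only -- the actual implementation is in
# scripts/rewrite_imports.py, which this diff does not show.
_DATASTORE_IMPORT = re.compile(
    r'^from google\.datastore\.v1beta3 import (\w+_pb2)',
    flags=re.MULTILINE)

def rewrite_datastore_imports(path):
    # Point a generated module's imports at gcloud.datastore._generated.
    with open(path) as file_obj:
        contents = file_obj.read()
    rewritten = _DATASTORE_IMPORT.sub(
        r'from gcloud.datastore._generated import \1', contents)
    with open(path, 'w') as file_obj:
        file_obj.write(rewritten)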

2 changes: 1 addition & 1 deletion README.rst
@@ -70,7 +70,7 @@ writes, strong consistency for reads and ancestor queries, and eventual
consistency for all other queries.

.. _Cloud Datastore: https://cloud.google.com/datastore/docs
.. _Datastore API docs: https://cloud.google.com/datastore/docs/apis/v1beta2/
.. _Datastore API docs: https://cloud.google.com/datastore/docs/apis/v1beta3/

See the ``gcloud-python`` API `datastore documentation`_ to learn how to
interact with the Cloud Datastore using this Client Library.
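
For readers following that pointer, a minimal usage sketch against the gcloud-python datastore client of this era; the kind, key ID, and property name are made up for illustration.

from gcloud import datastore

# Project and credentials are inferred from the environment.
client = datastore.Client()

# The kind, numeric ID, and 'answer' property are illustrative.
key = client.key('ExampleKind', 1234)
entity = datastore.Entity(key=key)
entity['answer'] = 42
client.put(entity)

fetched = client.get(key)
print(fetched['answer'])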