From 9396bc131267c466b916c669e1ef9c0e8841710d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 7 Nov 2017 14:17:51 -0800 Subject: [PATCH 1/5] Renaming `makeResource` -> `make_resource`. (#4355) Done via: $ git grep -l makeResource | xargs sed -i s/makeResource/make_resource/g --- bigquery/tests/unit/test_dataset.py | 4 +- bigquery/tests/unit/test_job.py | 88 ++++++++++++++--------------- bigquery/tests/unit/test_query.py | 44 +++++++-------- bigquery/tests/unit/test_schema.py | 8 +-- bigquery/tests/unit/test_table.py | 4 +- dns/tests/unit/test_changes.py | 16 +++--- dns/tests/unit/test_zone.py | 14 ++--- 7 files changed, 89 insertions(+), 89 deletions(-) diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py index 1f8580ebbcfd7..8a34b82a460d2 100644 --- a/bigquery/tests/unit/test_dataset.py +++ b/bigquery/tests/unit/test_dataset.py @@ -206,7 +206,7 @@ def _setUpConstants(self): self.DS_FULL_ID = '%s:%s' % (self.PROJECT, self.DS_ID) self.RESOURCE_URL = 'http://example.com/path/to/resource' - def _makeResource(self): + def _make_resource(self): self._setUpConstants() USER_EMAIL = 'phred@example.com' GROUP_EMAIL = 'group-name@lists.example.com' @@ -422,7 +422,7 @@ def test_from_api_repr_bare(self): self._verify_resource_properties(dataset, RESOURCE) def test_from_api_repr_w_properties(self): - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() klass = self._get_target_class() dataset = klass.from_api_repr(RESOURCE) self._verify_resource_properties(dataset, RESOURCE) diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 4f94a1881a305..95000b24e8ea4 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -114,7 +114,7 @@ def _table_ref(self, table_id): return TableReference(self.DS_REF, table_id) - def _makeResource(self, started=False, ended=False): + def _make_resource(self, started=False, ended=False): self._setUpConstants() resource = { 'configuration': { @@ -219,8 +219,8 @@ def _setUpConstants(self): self.OUTPUT_BYTES = 23456 self.OUTPUT_ROWS = 345 - def _makeResource(self, started=False, ended=False): - resource = super(TestLoadJob, self)._makeResource( + def _make_resource(self, started=False, ended=False): + resource = super(TestLoadJob, self)._make_resource( started, ended) config = resource['configuration']['load'] config['sourceUris'] = [self.SOURCE1] @@ -374,13 +374,13 @@ def test_ctor_w_config(self): def test_done(self): client = _make_client(project=self.PROJECT) - resource = self._makeResource(ended=True) + resource = self._make_resource(ended=True) job = self._get_target_class().from_api_repr(resource, client) self.assertTrue(job.done()) def test_result(self): client = _make_client(project=self.PROJECT) - resource = self._makeResource(ended=True) + resource = self._make_resource(ended=True) job = self._get_target_class().from_api_repr(resource, client) result = job.result() @@ -388,7 +388,7 @@ def test_result(self): self.assertIs(result, job) def test_result_invokes_begin(self): - begun_resource = self._makeResource() + begun_resource = self._make_resource() done_resource = copy.deepcopy(begun_resource) done_resource['status'] = {'state': 'DONE'} connection = _Connection(begun_resource, done_resource) @@ -537,7 +537,7 @@ def test_from_api_repr_bare(self): def test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() load_config = RESOURCE['configuration']['load'] 
load_config['createDisposition'] = 'CREATE_IF_NEEDED' klass = self._get_target_class() @@ -557,7 +557,7 @@ def test_begin_w_already_running(self): def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -595,7 +595,7 @@ def test_begin_w_bound_client(self): def test_begin_w_autodetect(self): path = '/projects/{}/jobs'.format(self.PROJECT) - resource = self._makeResource() + resource = self._make_resource() resource['configuration']['load']['autodetect'] = True # Ensure None for missing server-set props del resource['statistics']['creationTime'] @@ -639,7 +639,7 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.schema import SchemaField PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) LOAD_CONFIGURATION = { 'sourceUris': [self.SOURCE1], 'destinationTable': { @@ -743,7 +743,7 @@ def test_exists_hit_w_alternate_client(self): def test_reload_w_bound_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) table = _Table() @@ -759,7 +759,7 @@ def test_reload_w_bound_client(self): def test_reload_w_alternate_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn1 = _Connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) @@ -778,7 +778,7 @@ def test_reload_w_alternate_client(self): def test_cancel_w_bound_client(self): PATH = '/projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) RESPONSE = {'job': RESOURCE} conn = _Connection(RESPONSE) client = _make_client(project=self.PROJECT, connection=conn) @@ -795,7 +795,7 @@ def test_cancel_w_bound_client(self): def test_cancel_w_alternate_client(self): PATH = '/projects/%s/jobs/%s/cancel' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) RESPONSE = {'job': RESOURCE} conn1 = _Connection() client1 = _make_client(project=self.PROJECT, connection=conn1) @@ -825,8 +825,8 @@ def _get_target_class(): return CopyJob - def _makeResource(self, started=False, ended=False): - resource = super(TestCopyJob, self)._makeResource( + def _make_resource(self, started=False, ended=False): + resource = super(TestCopyJob, self)._make_resource( started, ended) config = resource['configuration']['copy'] config['sourceTables'] = [{ @@ -997,7 +997,7 @@ def test_from_api_repr_wo_sources(self): def test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() copy_config = RESOURCE['configuration']['copy'] copy_config['createDisposition'] = 'CREATE_IF_NEEDED' klass = self._get_target_class() @@ -1007,7 +1007,7 @@ def test_from_api_repr_w_properties(self): def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ 
-1050,7 +1050,7 @@ def test_begin_w_bound_client(self): def test_begin_w_alternate_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) COPY_CONFIGURATION = { 'sourceTables': [{ 'projectId': self.PROJECT, @@ -1134,7 +1134,7 @@ def test_exists_hit_w_alternate_client(self): def test_reload_w_bound_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) source = self._table_ref(self.SOURCE_TABLE) @@ -1151,7 +1151,7 @@ def test_reload_w_bound_client(self): def test_reload_w_alternate_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn1 = _Connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) @@ -1181,8 +1181,8 @@ def _get_target_class(): return ExtractJob - def _makeResource(self, started=False, ended=False): - resource = super(TestExtractJob, self)._makeResource( + def _make_resource(self, started=False, ended=False): + resource = super(TestExtractJob, self)._make_resource( started, ended) config = resource['configuration']['extract'] config['sourceTable'] = { @@ -1316,7 +1316,7 @@ def test_from_api_repr_bare(self): def test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() extract_config = RESOURCE['configuration']['extract'] extract_config['compression'] = 'GZIP' klass = self._get_target_class() @@ -1326,7 +1326,7 @@ def test_from_api_repr_w_properties(self): def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -1366,7 +1366,7 @@ def test_begin_w_bound_client(self): def test_begin_w_alternate_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) EXTRACT_CONFIGURATION = { 'sourceTable': { 'projectId': self.PROJECT, @@ -1450,7 +1450,7 @@ def test_exists_hit_w_alternate_client(self): def test_reload_w_bound_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) source_dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -1468,7 +1468,7 @@ def test_reload_w_bound_client(self): def test_reload_w_alternate_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn1 = _Connection() client1 = _make_client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) @@ -1562,8 +1562,8 @@ def _get_target_class(): return QueryJob - def _makeResource(self, started=False, ended=False): - resource = super(TestQueryJob, self)._makeResource( + def _make_resource(self, started=False, ended=False): + resource = super(TestQueryJob, self)._make_resource( started, ended) config = resource['configuration']['query'] config['query'] = self.QUERY @@ -1788,7 +1788,7 @@ def test_from_api_repr_bare(self): def 
test_from_api_repr_w_properties(self): client = _make_client(project=self.PROJECT) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() query_config = RESOURCE['configuration']['query'] query_config['createDisposition'] = 'CREATE_IF_NEEDED' query_config['writeDisposition'] = 'WRITE_TRUNCATE' @@ -1816,7 +1816,7 @@ def test_cancelled(self): def test_done(self): client = _make_client(project=self.PROJECT) - resource = self._makeResource(ended=True) + resource = self._make_resource(ended=True) job = self._get_target_class().from_api_repr(resource, client) self.assertTrue(job.done()) @@ -2137,7 +2137,7 @@ def test_result(self): } connection = _Connection(query_resource, query_resource) client = _make_client(self.PROJECT, connection=connection) - resource = self._makeResource(ended=True) + resource = self._make_resource(ended=True) job = self._get_target_class().from_api_repr(resource, client) result = job.result() @@ -2145,7 +2145,7 @@ def test_result(self): self.assertEqual(list(result), []) def test_result_invokes_begins(self): - begun_resource = self._makeResource() + begun_resource = self._make_resource() incomplete_resource = { 'jobComplete': False, 'jobReference': { @@ -2172,7 +2172,7 @@ def test_result_invokes_begins(self): self.assertEqual(reload_request['method'], 'GET') def test_result_w_timeout(self): - begun_resource = self._makeResource() + begun_resource = self._make_resource() query_resource = { 'jobComplete': True, 'jobReference': { @@ -2229,7 +2229,7 @@ def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) DS_ID = 'DATASET' - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2277,7 +2277,7 @@ def test_begin_w_alternate_client(self): PATH = '/projects/%s/jobs' % (self.PROJECT,) TABLE = 'TABLE' DS_ID = 'DATASET' - RESOURCE = self._makeResource(ended=True) + RESOURCE = self._make_resource(ended=True) QUERY_CONFIGURATION = { 'query': self.QUERY, 'allowLargeResults': True, @@ -2351,7 +2351,7 @@ def test_begin_w_udf(self): RESOURCE_URI = 'gs://some-bucket/js/lib.js' INLINE_UDF_CODE = 'var someCode = "here";' PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2405,7 +2405,7 @@ def test_begin_w_named_query_parameter(self): query_parameters = [ScalarQueryParameter('foo', 'INT64', 123)] PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2461,7 +2461,7 @@ def test_begin_w_positional_query_parameter(self): query_parameters = [ScalarQueryParameter.positional('INT64', 123)] PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2517,7 +2517,7 @@ def test_begin_w_table_defs(self): from google.cloud.bigquery.external_config import BigtableColumnFamily PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2601,7 +2601,7 @@ def 
test_dry_run_query(self): from google.cloud.bigquery.job import QueryJobConfig PATH = '/projects/%s/jobs' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() # Ensure None for missing server-set props del RESOURCE['statistics']['creationTime'] del RESOURCE['etag'] @@ -2675,7 +2675,7 @@ def test_reload_w_bound_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) DS_ID = 'DATASET' DEST_TABLE = 'dest_table' - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _make_client(project=self.PROJECT, connection=conn) dataset_ref = DatasetReference(self.PROJECT, DS_ID) @@ -2698,7 +2698,7 @@ def test_reload_w_alternate_client(self): PATH = '/projects/%s/jobs/%s' % (self.PROJECT, self.JOB_ID) DS_ID = 'DATASET' DEST_TABLE = 'dest_table' - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() q_config = RESOURCE['configuration']['query'] q_config['destinationTable'] = { 'projectId': self.PROJECT, diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py index 1924d55c99592..35def936946b1 100644 --- a/bigquery/tests/unit/test_query.py +++ b/bigquery/tests/unit/test_query.py @@ -987,7 +987,7 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def _makeResource(self): + def _make_resource(self): return { 'jobReference': { 'projectId': self.PROJECT, @@ -1013,7 +1013,7 @@ def _verifySchema(self, query, resource): self.assertEqual(query.schema, ()) def test_ctor_defaults(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.cache_hit) self.assertIsNone(query.complete) self.assertIsNone(query.errors) @@ -1025,34 +1025,34 @@ def test_ctor_defaults(self): self.assertIsNone(query.total_bytes_processed) def test_cache_hit_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.cache_hit) def test_cache_hit_present(self): - resource = self._makeResource() + resource = self._make_resource() resource['cacheHit'] = True query = self._make_one(resource) self.assertTrue(query.cache_hit) def test_complete_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.complete) def test_complete_present(self): - resource = self._makeResource() + resource = self._make_resource() resource['jobComplete'] = True query = self._make_one(resource) self.assertTrue(query.complete) def test_errors_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.errors) def test_errors_present(self): ERRORS = [ {'reason': 'testing'}, ] - resource = self._makeResource() + resource = self._make_resource() resource['errors'] = ERRORS query = self._make_one(resource) self.assertEqual(query.errors, ERRORS) @@ -1067,69 +1067,69 @@ def test_job_id_broken_job_reference(self): self._make_one(resource) def test_job_id_present(self): - resource = self._makeResource() + resource = self._make_resource() resource['jobReference']['jobId'] = 'custom-job' query = self._make_one(resource) self.assertEqual(query.job_id, 'custom-job') def test_page_token_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.page_token) def test_page_token_present(self): - resource = self._makeResource() + 
resource = self._make_resource() resource['pageToken'] = 'TOKEN' query = self._make_one(resource) self.assertEqual(query.page_token, 'TOKEN') def test_total_rows_present_integer(self): - resource = self._makeResource() + resource = self._make_resource() resource['totalRows'] = 42 query = self._make_one(resource) self.assertEqual(query.total_rows, 42) def test_total_rows_present_string(self): - resource = self._makeResource() + resource = self._make_resource() resource['totalRows'] = '42' query = self._make_one(resource) self.assertEqual(query.total_rows, 42) def test_total_bytes_processed_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.total_bytes_processed) def test_total_bytes_processed_present_integer(self): - resource = self._makeResource() + resource = self._make_resource() resource['totalBytesProcessed'] = 123456 query = self._make_one(resource) self.assertEqual(query.total_bytes_processed, 123456) def test_total_bytes_processed_present_string(self): - resource = self._makeResource() + resource = self._make_resource() resource['totalBytesProcessed'] = '123456' query = self._make_one(resource) self.assertEqual(query.total_bytes_processed, 123456) def test_num_dml_affected_rows_missing(self): - query = self._make_one(self._makeResource()) + query = self._make_one(self._make_resource()) self.assertIsNone(query.num_dml_affected_rows) def test_num_dml_affected_rows_present_integer(self): - resource = self._makeResource() + resource = self._make_resource() resource['numDmlAffectedRows'] = 123456 query = self._make_one(resource) self.assertEqual(query.num_dml_affected_rows, 123456) def test_num_dml_affected_rows_present_string(self): - resource = self._makeResource() + resource = self._make_resource() resource['numDmlAffectedRows'] = '123456' query = self._make_one(resource) self.assertEqual(query.num_dml_affected_rows, 123456) def test_schema(self): - query = self._make_one(self._makeResource()) - self._verifySchema(query, self._makeResource()) - resource = self._makeResource() + query = self._make_one(self._make_resource()) + self._verifySchema(query, self._make_resource()) + resource = self._make_resource() resource['schema'] = { 'fields': [ {'name': 'full_name', 'type': 'STRING', 'mode': 'REQURED'}, diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index e9c13f75ea3df..a5d5ecacd6195 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -261,7 +261,7 @@ def _call_fut(self, resource): return _parse_schema_resource(resource) - def _makeResource(self): + def _make_resource(self): return { 'schema': {'fields': [ {'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'}, @@ -270,12 +270,12 @@ def _makeResource(self): } def test__parse_schema_resource_defaults(self): - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() schema = self._call_fut(RESOURCE['schema']) self._verifySchema(schema, RESOURCE) def test__parse_schema_resource_subfields(self): - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() RESOURCE['schema']['fields'].append( {'name': 'phone', 'type': 'RECORD', @@ -290,7 +290,7 @@ def test__parse_schema_resource_subfields(self): self._verifySchema(schema, RESOURCE) def test__parse_schema_resource_fields_without_mode(self): - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() RESOURCE['schema']['fields'].append( {'name': 'phone', 'type': 'STRING'}) diff --git 
a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index 9bdd62c7404ed..888974c3cdaf1 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -177,7 +177,7 @@ def _setUpConstants(self): self.NUM_EST_BYTES = 1234 self.NUM_EST_ROWS = 23 - def _makeResource(self): + def _make_resource(self): self._setUpConstants() return { 'creationTime': self.WHEN_TS * 1000, @@ -576,7 +576,7 @@ def test_from_api_repr_w_properties(self): from google.cloud._helpers import UTC from google.cloud._helpers import _millis - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() RESOURCE['view'] = {'query': 'select fullname, age from person_ages'} RESOURCE['type'] = 'VIEW' RESOURCE['location'] = 'EU' diff --git a/dns/tests/unit/test_changes.py b/dns/tests/unit/test_changes.py index cc92973da9e79..ffe76081a2c77 100644 --- a/dns/tests/unit/test_changes.py +++ b/dns/tests/unit/test_changes.py @@ -35,7 +35,7 @@ def _setUpConstants(self): self.WHEN = _NOW().replace(tzinfo=UTC) - def _makeResource(self): + def _make_resource(self): from google.cloud._helpers import _datetime_to_rfc3339 when_str = _datetime_to_rfc3339(self.WHEN) @@ -98,7 +98,7 @@ def test_ctor(self): def test_from_api_repr_missing_additions_deletions(self): self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() del RESOURCE['additions'] del RESOURCE['deletions'] zone = _Zone() @@ -110,7 +110,7 @@ def test_from_api_repr_missing_additions_deletions(self): def test_from_api_repr(self): self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() zone = _Zone() klass = self._get_target_class() @@ -166,7 +166,7 @@ def test_delete_record_set(self): def test_create_wo_additions_or_deletions(self): self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) zone = _Zone(client) @@ -181,7 +181,7 @@ def test_create_w_bound_client(self): from google.cloud.dns.resource_record_set import ResourceRecordSet self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() PATH = 'projects/%s/managedZones/%s/changes' % ( self.PROJECT, self.ZONE_NAME) conn = _Connection(RESOURCE) @@ -210,7 +210,7 @@ def test_create_w_alternate_client(self): from google.cloud.dns.resource_record_set import ResourceRecordSet self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() PATH = 'projects/%s/managedZones/%s/changes' % ( self.PROJECT, self.ZONE_NAME) conn1 = _Connection() @@ -280,7 +280,7 @@ def test_reload_w_bound_client(self): PATH = 'projects/%s/managedZones/%s/changes/%s' % ( self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) zone = _Zone(client) @@ -299,7 +299,7 @@ def test_reload_w_alternate_client(self): PATH = 'projects/%s/managedZones/%s/changes/%s' % ( self.PROJECT, self.ZONE_NAME, self.CHANGES_NAME) self._setUpConstants() - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) diff --git a/dns/tests/unit/test_zone.py b/dns/tests/unit/test_zone.py index 380c5f5d81db1..c581745c328d3 100644 --- a/dns/tests/unit/test_zone.py +++ b/dns/tests/unit/test_zone.py @@ -48,7 +48,7 @@ 
def _setUpConstants(self): year, month, day, hour, minute, seconds, micros, tzinfo=UTC) self.ZONE_ID = 12345 - def _makeResource(self): + def _make_resource(self): self._setUpConstants() return { 'name': self.ZONE_NAME, @@ -153,7 +153,7 @@ def test_from_api_repr_bare(self): def test_from_api_repr_w_properties(self): self._setUpConstants() client = _Client(self.PROJECT) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() klass = self._get_target_class() zone = klass.from_api_repr(RESOURCE, client=client) self.assertIs(zone._client, client) @@ -211,7 +211,7 @@ def test_changes(self): def test_create_w_bound_client(self): PATH = 'projects/%s/managedZones' % self.PROJECT - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) zone = self._make_one(self.ZONE_NAME, self.DNS_NAME, client) @@ -234,7 +234,7 @@ def test_create_w_alternate_client(self): PATH = 'projects/%s/managedZones' % self.PROJECT DESCRIPTION = 'DESCRIPTION' NAME_SERVER_SET = 'NAME_SERVER_SET' - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() RESOURCE['nameServerSet'] = NAME_SERVER_SET RESOURCE['description'] = DESCRIPTION conn1 = _Connection() @@ -293,7 +293,7 @@ def test_create_w_missing_output_properties(self): # In the wild, the resource returned from 'zone.create' sometimes # lacks 'creationTime' / 'lastModifiedTime' PATH = 'projects/%s/managedZones' % (self.PROJECT,) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() del RESOURCE['creationTime'] del RESOURCE['id'] del RESOURCE['nameServers'] @@ -349,7 +349,7 @@ def test_exists_hit_w_alternate_client(self): def test_reload_w_bound_client(self): PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) zone = self._make_one(self.ZONE_NAME, client=client) @@ -366,7 +366,7 @@ def test_reload_w_bound_client(self): def test_reload_w_alternate_client(self): PATH = 'projects/%s/managedZones/%s' % (self.PROJECT, self.ZONE_NAME) - RESOURCE = self._makeResource() + RESOURCE = self._make_resource() conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection(RESOURCE) From 9380ed910d491e4dd2e0731af03990cd5ae4d26f Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 8 Nov 2017 11:55:37 -0800 Subject: [PATCH 2/5] minor typo (#4361) --- spanner/tests/system/test_system.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index 3843ea94496c2..fe225b6c70db2 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -520,7 +520,7 @@ def _transaction_read_then_raise(self, transaction): raise CustomException() @RetryErrors(exception=GrpcRendezvous) - def test_transaction_read_and_insert_then_execption(self): + def test_transaction_read_and_insert_then_exception(self): retry = RetryInstanceState(_has_all_ddl) retry(self._db.reload)() From 6c602823e7101b40dfe5bfc614da633beb8accfb Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 8 Nov 2017 12:19:20 -0800 Subject: [PATCH 3/5] Bump natural language to GA. 
(#4362) --- README.rst | 13 ++++++++++++- docs/language/releases.rst | 1 + docs/language/usage.rst | 4 +++- language/README.rst | 5 +++++ language/setup.py | 4 ++-- setup.py | 4 ++-- 6 files changed, 25 insertions(+), 6 deletions(-) diff --git a/README.rst b/README.rst index 4963d6b0da6bf..460064e732c4a 100644 --- a/README.rst +++ b/README.rst @@ -15,9 +15,15 @@ Google Cloud Python Client .. _API Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/ .. _Read The Docs Documentation: https://google-cloud-python.readthedocs.io/en/latest/ +.. note:: + + These libraries currently do not run on Google App Engine Standard. + We are actively working on adding this support. + The following client libraries have **GA** support: - `Google Cloud Datastore`_ (`Datastore README`_) +- `Google Cloud Natural Language`_ (`Natural Language README`_) - `Google Cloud Storage`_ (`Storage README`_) - `Google Cloud Translation`_ (`Translation README`_) - `Stackdriver Logging`_ (`Logging README`_) @@ -29,11 +35,16 @@ of critical security issues) or with an extensive deprecation period. Issues and requests against GA libraries are addressed with the highest priority. +.. note:: + + Sub-components of GA libraries explicitly marked as beta in the + import path (e.g. ``google.cloud.language_v1beta2``) should be considered + to be beta. + The following client libraries have **beta** support: - `Google BigQuery`_ (`BigQuery README`_) - `Google Cloud Firestore`_ (`Firestore README`_) -- `Google Cloud Natural Language`_ (`Natural Language README`_) - `Google Cloud Pub/Sub`_ (`Pub/Sub README`_) - `Google Cloud Spanner`_ (`Spanner README`_) - `Google Cloud Speech`_ (`Speech README`_) diff --git a/docs/language/releases.rst b/docs/language/releases.rst index c35684df3cc7c..91b1c084234f5 100644 --- a/docs/language/releases.rst +++ b/docs/language/releases.rst @@ -19,6 +19,7 @@ * ``0.29.0`` (`PyPI `__, `Release Notes `__) * ``0.30.0`` (`PyPI `__, `Release Notes `__) * ``0.31.0`` (`PyPI `__, `Release Notes `__) +* ``1.0.0`` (`PyPI `__, `Release Notes `__) ****************************************** ``google-cloud-natural-language`` Releases diff --git a/docs/language/usage.rst b/docs/language/usage.rst index af73d416538c9..7b800fe0714c5 100644 --- a/docs/language/usage.rst +++ b/docs/language/usage.rst @@ -51,12 +51,13 @@ or pass in ``credentials`` explicitly. Documents ********* -The Google Natural Language API has three supported methods +The Google Natural Language API has the following supported methods: - `analyzeEntities`_ - `analyzeSentiment`_ - `analyzeEntitySentiment`_ - `annotateText`_ +- `classifyText`_ and each method uses a :class:`~.language_v1.types.Document` for representing text. @@ -116,6 +117,7 @@ to content stored in `Google Cloud Storage`_. .. _analyzeSentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/analyzeSentiment .. _analyzeEntitySentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/analyzeEntitySentiment .. _annotateText: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/annotateText +.. _classifyText: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/classifyText .. _Google Cloud Storage: https://cloud.google.com/storage/ **************** diff --git a/language/README.rst b/language/README.rst index 6d8ab60202141..db07a7c418744 100644 --- a/language/README.rst +++ b/language/README.rst @@ -11,6 +11,11 @@ Python Client for Google Cloud Natural Language .. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/language/usage.html +.. note:: + + This library currently does not run on Google App Engine Standard. + We are actively working on adding this support. + Quick Start ----------- diff --git a/language/setup.py b/language/setup.py index 8c8193bcbd7b2..48a97f8cd7fd5 100644 --- a/language/setup.py +++ b/language/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -60,7 +60,7 @@ setup( name='google-cloud-language', - version='0.31.1.dev1', + version='1.0.0', description='Python Client for Google Cloud Natural Language', long_description=README, namespace_packages=[ diff --git a/setup.py b/setup.py index 041314682730b..bbda94ac94deb 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ 'google-cloud-dns >= 0.28.0, < 0.29dev', 'google-cloud-error-reporting >= 0.28.0, < 0.29dev', 'google-cloud-firestore >= 0.28.0, < 0.29dev', - 'google-cloud-language >= 0.31.0, < 0.32dev', + 'google-cloud-language >= 1.0.0, < 1.1dev', 'google-cloud-logging >= 1.4.0, < 1.5dev', 'google-cloud-monitoring >= 0.28.0, < 0.29dev', 'google-cloud-pubsub >= 0.29.0, < 0.30dev', @@ -74,7 +74,7 @@ setup( name='google-cloud', - version='0.29.1.dev1', + version='0.30.0', description='API Client library for Google Cloud', long_description=README, install_requires=REQUIREMENTS, From fde9109fd4e561d561c06b3eb4d4ced7adb29fc0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 8 Nov 2017 14:15:05 -0800 Subject: [PATCH 4/5] New Datastore auto-gen. (#4348) --- datastore/google/cloud/datastore/_gax.py | 10 +- datastore/google/cloud/datastore/_http.py | 2 +- datastore/google/cloud/datastore/batch.py | 2 +- datastore/google/cloud/datastore/client.py | 7 +- datastore/google/cloud/datastore/helpers.py | 2 +- datastore/google/cloud/datastore/key.py | 2 +- datastore/google/cloud/datastore/query.py | 6 +- .../google/cloud/datastore_v1/__init__.py | 30 + .../cloud/datastore_v1/gapic/__init__.py | 0 .../datastore_v1/gapic/datastore_client.py | 519 +++++ .../gapic/datastore_client_config.py | 58 + .../google/cloud/datastore_v1/gapic/enums.py | 150 ++ .../cloud/datastore_v1/proto/__init__.py | 0 .../cloud/datastore_v1/proto/datastore_pb2.py | 1745 +++++++++++++++++ .../datastore_v1/proto/datastore_pb2_grpc.py | 163 ++ .../cloud/datastore_v1/proto/entity_pb2.py | 661 +++++++ .../datastore_v1/proto/entity_pb2_grpc.py | 3 + .../cloud/datastore_v1/proto/query_pb2.py | 1145 +++++++++++ .../datastore_v1/proto/query_pb2_grpc.py | 3 + datastore/google/cloud/datastore_v1/types.py | 46 + datastore/setup.py | 6 +- datastore/tests/unit/test__gax.py | 24 +- datastore/tests/unit/test__http.py | 56 +- datastore/tests/unit/test_batch.py | 22 +- datastore/tests/unit/test_client.py | 77 +- datastore/tests/unit/test_helpers.py | 64 +- datastore/tests/unit/test_key.py | 2 +- datastore/tests/unit/test_query.py | 30 +- datastore/tests/unit/test_transaction.py | 12 +- 29 files changed, 4703 insertions(+), 144 deletions(-) create mode 100644 datastore/google/cloud/datastore_v1/__init__.py create mode 100644 datastore/google/cloud/datastore_v1/gapic/__init__.py create mode 100644 datastore/google/cloud/datastore_v1/gapic/datastore_client.py create mode 100644 datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py create 
mode 100644 datastore/google/cloud/datastore_v1/gapic/enums.py create mode 100644 datastore/google/cloud/datastore_v1/proto/__init__.py create mode 100644 datastore/google/cloud/datastore_v1/proto/datastore_pb2.py create mode 100644 datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py create mode 100644 datastore/google/cloud/datastore_v1/proto/entity_pb2.py create mode 100644 datastore/google/cloud/datastore_v1/proto/entity_pb2_grpc.py create mode 100644 datastore/google/cloud/datastore_v1/proto/query_pb2.py create mode 100644 datastore/google/cloud/datastore_v1/proto/query_pb2_grpc.py create mode 100644 datastore/google/cloud/datastore_v1/types.py diff --git a/datastore/google/cloud/datastore/_gax.py b/datastore/google/cloud/datastore/_gax.py index 2d3e7459f6dd6..4eb54eb7681d1 100644 --- a/datastore/google/cloud/datastore/_gax.py +++ b/datastore/google/cloud/datastore/_gax.py @@ -18,7 +18,8 @@ import contextlib import sys -from google.cloud.gapic.datastore.v1 import datastore_client +from google.api_core.gapic_v1 import client_info +from google.cloud.datastore_v1.gapic import datastore_client from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.gax.utils import metrics @@ -231,4 +232,9 @@ def make_datastore_api(client): channel = insecure_channel(host) return GAPICDatastoreAPI( - channel=channel, lib_name='gccl', lib_version=__version__) + channel=channel, + client_info=client_info.ClientInfo( + client_library_version=__version__, + gapic_version=__version__, + ), + ) diff --git a/datastore/google/cloud/datastore/_http.py b/datastore/google/cloud/datastore/_http.py index 02bce52b730e6..a161b9b096c05 100644 --- a/datastore/google/cloud/datastore/_http.py +++ b/datastore/google/cloud/datastore/_http.py @@ -18,7 +18,7 @@ from google.cloud import _http as connection_module from google.cloud import exceptions -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 from google.cloud.datastore import __version__ diff --git a/datastore/google/cloud/datastore/batch.py b/datastore/google/cloud/datastore/batch.py index 5dd7a3146e7ce..49be09964eb3f 100644 --- a/datastore/google/cloud/datastore/batch.py +++ b/datastore/google/cloud/datastore/batch.py @@ -22,7 +22,7 @@ """ from google.cloud.datastore import helpers -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 class Batch(object): diff --git a/datastore/google/cloud/datastore/client.py b/datastore/google/cloud/datastore/client.py index 4a4228a6b7f27..ec522cc5c1cca 100644 --- a/datastore/google/cloud/datastore/client.py +++ b/datastore/google/cloud/datastore/client.py @@ -15,7 +15,7 @@ import os -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 from google.cloud._helpers import _LocalStack from google.cloud._helpers import ( @@ -135,7 +135,10 @@ def _extended_lookup(datastore_api, project, key_pbs, while loop_num < _MAX_LOOPS: # loop against possible deferred. loop_num += 1 lookup_response = datastore_api.lookup( - project, read_options, key_pbs) + project_id=project, + read_options=read_options, + keys=key_pbs, + ) # Accumulate the new results. 
results.extend(result.entity for result in lookup_response.found) diff --git a/datastore/google/cloud/datastore/helpers.py b/datastore/google/cloud/datastore/helpers.py index 056376965725b..11e21aa46da05 100644 --- a/datastore/google/cloud/datastore/helpers.py +++ b/datastore/google/cloud/datastore/helpers.py @@ -26,7 +26,7 @@ from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _pb_timestamp_to_datetime -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key diff --git a/datastore/google/cloud/datastore/key.py b/datastore/google/cloud/datastore/key.py index f2581b48714c2..74d23e49265ce 100644 --- a/datastore/google/cloud/datastore/key.py +++ b/datastore/google/cloud/datastore/key.py @@ -18,7 +18,7 @@ import copy import six -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 from google.cloud._helpers import _to_bytes from google.cloud.datastore import _app_engine_key_pb2 diff --git a/datastore/google/cloud/datastore/query.py b/datastore/google/cloud/datastore/query.py index 477eccb043959..28febdd1d4222 100644 --- a/datastore/google/cloud/datastore/query.py +++ b/datastore/google/cloud/datastore/query.py @@ -19,9 +19,9 @@ from google.api_core import page_iterator from google.cloud._helpers import _ensure_tuple_or_list -from google.cloud.proto.datastore.v1 import datastore_pb2 as _datastore_pb2 -from google.cloud.proto.datastore.v1 import entity_pb2 as _entity_pb2 -from google.cloud.proto.datastore.v1 import query_pb2 as _query_pb2 +from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as _entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 as _query_pb2 from google.cloud.datastore import helpers from google.cloud.datastore.key import Key diff --git a/datastore/google/cloud/datastore_v1/__init__.py b/datastore/google/cloud/datastore_v1/__init__.py new file mode 100644 index 0000000000000..5157e60d4e801 --- /dev/null +++ b/datastore/google/cloud/datastore_v1/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from google.cloud.datastore_v1 import types +from google.cloud.datastore_v1.gapic import datastore_client +from google.cloud.datastore_v1.gapic import enums + + +class DatastoreClient(datastore_client.DatastoreClient): + __doc__ = datastore_client.DatastoreClient.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'DatastoreClient', ) diff --git a/datastore/google/cloud/datastore_v1/gapic/__init__.py b/datastore/google/cloud/datastore_v1/gapic/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/datastore/google/cloud/datastore_v1/gapic/datastore_client.py b/datastore/google/cloud/datastore_v1/gapic/datastore_client.py new file mode 100644 index 0000000000000..983e30dce902c --- /dev/null +++ b/datastore/google/cloud/datastore_v1/gapic/datastore_client.py @@ -0,0 +1,519 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/datastore/v1/datastore.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.datastore.v1 Datastore API.""" + +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.protobuf_helpers + +from google.cloud.datastore_v1.gapic import datastore_client_config +from google.cloud.datastore_v1.gapic import enums +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-datastore', ).version + + +class DatastoreClient(object): + """ + Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + """ + + SERVICE_ADDRESS = 'datastore.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/datastore', ) + + # The name of the interface for this client. 
This is the key used to find + # method configuration in the client_config dictionary + _INTERFACE_NAME = ('google.datastore.v1.Datastore') + + def __init__(self, + channel=None, + credentials=None, + client_config=datastore_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. If specified, then the ``credentials`` + argument is ignored. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): + A dictionary of call options for each method. If not specified + the default configuration is used. Generally, you only need + to set this if you're developing your own client library. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + if channel is not None and credentials is not None: + raise ValueError( + 'channel and credentials arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__)) + + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES) + + self.datastore_stub = (datastore_pb2.DatastoreStub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + interface_config = client_config['interfaces'][self._INTERFACE_NAME] + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + interface_config) + + self._lookup = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Lookup, + default_retry=method_configs['Lookup'].retry, + default_timeout=method_configs['Lookup'].timeout, + client_info=client_info) + self._run_query = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.RunQuery, + default_retry=method_configs['RunQuery'].retry, + default_timeout=method_configs['RunQuery'].timeout, + client_info=client_info) + self._begin_transaction = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.BeginTransaction, + default_retry=method_configs['BeginTransaction'].retry, + default_timeout=method_configs['BeginTransaction'].timeout, + client_info=client_info) + self._commit = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Commit, + default_retry=method_configs['Commit'].retry, + default_timeout=method_configs['Commit'].timeout, + client_info=client_info) + self._rollback = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.Rollback, + default_retry=method_configs['Rollback'].retry, + default_timeout=method_configs['Rollback'].timeout, + client_info=client_info) + self._allocate_ids = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.AllocateIds, + default_retry=method_configs['AllocateIds'].retry, + default_timeout=method_configs['AllocateIds'].timeout, + client_info=client_info) + self._reserve_ids = google.api_core.gapic_v1.method.wrap_method( + self.datastore_stub.ReserveIds, + default_retry=method_configs['ReserveIds'].retry, + default_timeout=method_configs['ReserveIds'].timeout, + 
client_info=client_info) + + # Service calls + def lookup(self, + project_id, + keys, + read_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Looks up entities by key. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.lookup(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): Keys of entities to look up. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this lookup request. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.ReadOptions` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.LookupResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.LookupRequest( + project_id=project_id, keys=keys, read_options=read_options) + return self._lookup(request, retry=retry, timeout=timeout) + + def run_query(self, + project_id, + partition_id, + read_options=None, + query=None, + gql_query=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Queries for entities. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> partition_id = {} + >>> + >>> response = client.run_query(project_id, partition_id) + + Args: + project_id (str): The ID of the project against which to make the request. + partition_id (Union[dict, ~google.cloud.datastore_v1.types.PartitionId]): Entities are partitioned into subsets, identified by a partition ID. + Queries are scoped to a single partition. + This partition ID is normalized with the standard default context + partition ID. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.PartitionId` + read_options (Union[dict, ~google.cloud.datastore_v1.types.ReadOptions]): The options for this query. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.ReadOptions` + query (Union[dict, ~google.cloud.datastore_v1.types.Query]): The query to run. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Query` + gql_query (Union[dict, ~google.cloud.datastore_v1.types.GqlQuery]): The GQL query to run. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.GqlQuery` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.RunQueryResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + query=query, + gql_query=gql_query, ) + + request = datastore_pb2.RunQueryRequest( + project_id=project_id, + partition_id=partition_id, + read_options=read_options, + query=query, + gql_query=gql_query) + return self._run_query(request, retry=retry, timeout=timeout) + + def begin_transaction(self, + project_id, + transaction_options=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Begins a new transaction. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> + >>> response = client.begin_transaction(project_id) + + Args: + project_id (str): The ID of the project against which to make the request. + transaction_options (Union[dict, ~google.cloud.datastore_v1.types.TransactionOptions]): Options for a new transaction. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.TransactionOptions` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.BeginTransactionResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.BeginTransactionRequest( + project_id=project_id, transaction_options=transaction_options) + return self._begin_transaction(request, retry=retry, timeout=timeout) + + def commit(self, + project_id, + mode, + mutations, + transaction=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Commits a transaction, optionally creating, deleting or modifying some + entities. 
+ + Example: + >>> from google.cloud import datastore_v1 + >>> from google.cloud.datastore_v1 import enums + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> mode = enums.CommitRequest.Mode.MODE_UNSPECIFIED + >>> mutations = [] + >>> + >>> response = client.commit(project_id, mode, mutations) + + Args: + project_id (str): The ID of the project against which to make the request. + mode (~google.cloud.datastore_v1.types.Mode): The type of commit to perform. Defaults to ``TRANSACTIONAL``. + mutations (list[Union[dict, ~google.cloud.datastore_v1.types.Mutation]]): The mutations to perform. + + When mode is ``TRANSACTIONAL``, mutations affecting a single entity are + applied in order. The following sequences of mutations affecting a single + entity are not permitted in a single ``Commit`` request: + + - ``insert`` followed by ``insert`` + - ``update`` followed by ``insert`` + - ``upsert`` followed by ``insert`` + - ``delete`` followed by ``update`` + + When mode is ``NON_TRANSACTIONAL``, no two mutations may affect a single + entity. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Mutation` + transaction (bytes): The identifier of the transaction associated with the commit. A + transaction identifier is returned by a call to + ``Datastore.BeginTransaction``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.CommitResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof(transaction=transaction, ) + + request = datastore_pb2.CommitRequest( + project_id=project_id, + mode=mode, + mutations=mutations, + transaction=transaction) + return self._commit(request, retry=retry, timeout=timeout) + + def rollback(self, + project_id, + transaction, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Rolls back a transaction. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> transaction = b'' + >>> + >>> response = client.rollback(project_id, transaction) + + Args: + project_id (str): The ID of the project against which to make the request. + transaction (bytes): The transaction identifier, returned by a call to + ``Datastore.BeginTransaction``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.RollbackResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.RollbackRequest( + project_id=project_id, transaction=transaction) + return self._rollback(request, retry=retry, timeout=timeout) + + def allocate_ids(self, + project_id, + keys, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.allocate_ids(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with incomplete key paths for which to allocate IDs. + No key may be reserved/read-only. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.AllocateIdsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.AllocateIdsRequest( + project_id=project_id, keys=keys) + return self._allocate_ids(request, retry=retry, timeout=timeout) + + def reserve_ids(self, + project_id, + keys, + database_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + + Example: + >>> from google.cloud import datastore_v1 + >>> + >>> client = datastore_v1.DatastoreClient() + >>> + >>> project_id = '' + >>> keys = [] + >>> + >>> response = client.reserve_ids(project_id, keys) + + Args: + project_id (str): The ID of the project against which to make the request. + keys (list[Union[dict, ~google.cloud.datastore_v1.types.Key]]): A list of keys with complete key paths whose numeric IDs should not be + auto-allocated. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datastore_v1.types.Key` + database_id (str): If not empty, the ID of the database against which to make the request. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.datastore_v1.types.ReserveIdsResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = datastore_pb2.ReserveIdsRequest( + project_id=project_id, keys=keys, database_id=database_id) + return self._reserve_ids(request, retry=retry, timeout=timeout) diff --git a/datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py b/datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py new file mode 100644 index 0000000000000..1a3eb9523447a --- /dev/null +++ b/datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py @@ -0,0 +1,58 @@ +config = { + "interfaces": { + "google.datastore.v1.Datastore": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "Lookup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "RunQuery": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "BeginTransaction": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Commit": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Rollback": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "AllocateIds": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ReserveIds": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/datastore/google/cloud/datastore_v1/gapic/enums.py b/datastore/google/cloud/datastore_v1/gapic/enums.py new file mode 100644 index 0000000000000..31dc31d8e92f8 --- /dev/null +++ b/datastore/google/cloud/datastore_v1/gapic/enums.py @@ -0,0 +1,150 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class NullValue(object): + """ + ``NullValue`` is a singleton enumeration to represent the null value for the + ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + NULL_VALUE = 0 + + +class EntityResult(object): + class ResultType(object): + """ + Specifies what data the 'entity' field contains. 
+ A ``ResultType`` is either implied (for example, in ``LookupResponse.missing`` + from ``datastore.proto``, it is always ``KEY_ONLY``) or specified by context + (for example, in message ``QueryResultBatch``, field ``entity_result_type`` + specifies a ``ResultType`` for all the values in field ``entity_results``). + + Attributes: + RESULT_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. + FULL (int): The key and properties. + PROJECTION (int): A projected subset of properties. The entity may have no key. + KEY_ONLY (int): Only the key. + """ + RESULT_TYPE_UNSPECIFIED = 0 + FULL = 1 + PROJECTION = 2 + KEY_ONLY = 3 + + +class PropertyOrder(object): + class Direction(object): + """ + The sort direction. + + Attributes: + DIRECTION_UNSPECIFIED (int): Unspecified. This value must not be used. + ASCENDING (int): Ascending. + DESCENDING (int): Descending. + """ + DIRECTION_UNSPECIFIED = 0 + ASCENDING = 1 + DESCENDING = 2 + + +class CompositeFilter(object): + class Operator(object): + """ + A composite filter operator. + + Attributes: + OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. + AND (int): The results are required to satisfy each of the combined filters. + """ + OPERATOR_UNSPECIFIED = 0 + AND = 1 + + +class PropertyFilter(object): + class Operator(object): + """ + A property filter operator. + + Attributes: + OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. + LESS_THAN (int): Less than. + LESS_THAN_OR_EQUAL (int): Less than or equal. + GREATER_THAN (int): Greater than. + GREATER_THAN_OR_EQUAL (int): Greater than or equal. + EQUAL (int): Equal. + HAS_ANCESTOR (int): Has ancestor. + """ + OPERATOR_UNSPECIFIED = 0 + LESS_THAN = 1 + LESS_THAN_OR_EQUAL = 2 + GREATER_THAN = 3 + GREATER_THAN_OR_EQUAL = 4 + EQUAL = 5 + HAS_ANCESTOR = 11 + + +class QueryResultBatch(object): + class MoreResultsType(object): + """ + The possible values for the ``more_results`` field. + + Attributes: + MORE_RESULTS_TYPE_UNSPECIFIED (int): Unspecified. This value is never used. + NOT_FINISHED (int): There may be additional batches to fetch from this query. + MORE_RESULTS_AFTER_LIMIT (int): The query is finished, but there may be more results after the limit. + MORE_RESULTS_AFTER_CURSOR (int): The query is finished, but there may be more results after the end + cursor. + NO_MORE_RESULTS (int): The query is finished, and there are no more results. + """ + MORE_RESULTS_TYPE_UNSPECIFIED = 0 + NOT_FINISHED = 1 + MORE_RESULTS_AFTER_LIMIT = 2 + MORE_RESULTS_AFTER_CURSOR = 4 + NO_MORE_RESULTS = 3 + + +class CommitRequest(object): + class Mode(object): + """ + The modes available for commits. + + Attributes: + MODE_UNSPECIFIED (int): Unspecified. This value must not be used. + TRANSACTIONAL (int): Transactional: The mutations are either all applied, or none are applied. + Learn about transactions `here `_. + NON_TRANSACTIONAL (int): Non-transactional: The mutations may not apply as all or none. + """ + MODE_UNSPECIFIED = 0 + TRANSACTIONAL = 1 + NON_TRANSACTIONAL = 2 + + +class ReadOptions(object): + class ReadConsistency(object): + """ + The possible values for read consistencies. + + Attributes: + READ_CONSISTENCY_UNSPECIFIED (int): Unspecified. This value must not be used. + STRONG (int): Strong consistency. + EVENTUAL (int): Eventual consistency. 
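+
+        Example (a minimal sketch of reading one of these values; the
+        ``enums`` import mirrors the client examples above):
+            >>> from google.cloud.datastore_v1 import enums
+            >>> enums.ReadOptions.ReadConsistency.EVENTUAL
+            2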
+ """ + READ_CONSISTENCY_UNSPECIFIED = 0 + STRONG = 1 + EVENTUAL = 2 diff --git a/datastore/google/cloud/datastore_v1/proto/__init__.py b/datastore/google/cloud/datastore_v1/proto/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/datastore/google/cloud/datastore_v1/proto/datastore_pb2.py b/datastore/google/cloud/datastore_v1/proto/datastore_pb2.py new file mode 100644 index 0000000000000..688ff0bcfa4de --- /dev/null +++ b/datastore/google/cloud/datastore_v1/proto/datastore_pb2.py @@ -0,0 +1,1745 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/datastore_v1/proto/datastore.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.datastore_v1.proto import entity_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_entity__pb2 +from google.cloud.datastore_v1.proto import query_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_query__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/datastore_v1/proto/datastore.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n/google/cloud/datastore_v1/proto/datastore.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/datastore_v1/proto/entity.proto\x1a+google/cloud/datastore_v1/proto/query.proto\"\x83\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12&\n\x04keys\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\xa2\x01\n\x0eLookupResponse\x12\x30\n\x05\x66ound\x18\x01 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x32\n\x07missing\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12*\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x84\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x36\n\x0cpartition_id\x18\x02 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x36\n\x0cread_options\x18\x01 \x01(\x0b\x32 .google.datastore.v1.ReadOptions\x12+\n\x05query\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.QueryH\x00\x12\x32\n\tgql_query\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1.GqlQueryH\x00\x42\x0c\n\nquery_type\"s\n\x10RunQueryResponse\x12\x34\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32%.google.datastore.v1.QueryResultBatch\x12)\n\x05query\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.Query\"s\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x44\n\x13transaction_options\x18\n \x01(\x0b\x32\'.google.datastore.v1.TransactionOptions\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x83\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x35\n\x04mode\x18\x05 \x01(\x0e\x32\'.google.datastore.v1.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x30\n\tmutations\x18\x06 
\x03(\x0b\x32\x1d.google.datastore.v1.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"f\n\x0e\x43ommitResponse\x12=\n\x10mutation_results\x18\x03 \x03(\x0b\x32#.google.datastore.v1.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"P\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"=\n\x13\x41llocateIdsResponse\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"d\n\x11ReserveIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0b\x64\x61tabase_id\x18\t \x01(\t\x12&\n\x04keys\x18\x01 \x03(\x0b\x32\x18.google.datastore.v1.Key\"\x14\n\x12ReserveIdsResponse\"\x87\x02\n\x08Mutation\x12-\n\x06insert\x18\x04 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06update\x18\x05 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12-\n\x06upsert\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12*\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0c\x62\x61se_version\x18\x08 \x01(\x03H\x01\x42\x0b\n\toperationB\x1d\n\x1b\x63onflict_detection_strategy\"c\n\x0eMutationResult\x12%\n\x03key\x18\x03 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12\x0f\n\x07version\x18\x04 \x01(\x03\x12\x19\n\x11\x63onflict_detected\x18\x05 \x01(\x08\"\xd5\x01\n\x0bReadOptions\x12L\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x30.google.datastore.v1.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 \n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type\"\xe3\x01\n\x12TransactionOptions\x12G\n\nread_write\x18\x01 \x01(\x0b\x32\x31.google.datastore.v1.TransactionOptions.ReadWriteH\x00\x12\x45\n\tread_only\x18\x02 \x01(\x0b\x32\x30.google.datastore.v1.TransactionOptions.ReadOnlyH\x00\x1a)\n\tReadWrite\x12\x1c\n\x14previous_transaction\x18\x01 \x01(\x0c\x1a\n\n\x08ReadOnlyB\x06\n\x04mode2\xec\x07\n\tDatastore\x12~\n\x06Lookup\x12\".google.datastore.v1.LookupRequest\x1a#.google.datastore.v1.LookupResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:lookup:\x01*\x12\x86\x01\n\x08RunQuery\x12$.google.datastore.v1.RunQueryRequest\x1a%.google.datastore.v1.RunQueryResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:runQuery:\x01*\x12\xa6\x01\n\x10\x42\x65ginTransaction\x12,.google.datastore.v1.BeginTransactionRequest\x1a-.google.datastore.v1.BeginTransactionResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1/projects/{project_id}:beginTransaction:\x01*\x12~\n\x06\x43ommit\x12\".google.datastore.v1.CommitRequest\x1a#.google.datastore.v1.CommitResponse\"+\x82\xd3\xe4\x93\x02%\" /v1/projects/{project_id}:commit:\x01*\x12\x86\x01\n\x08Rollback\x12$.google.datastore.v1.RollbackRequest\x1a%.google.datastore.v1.RollbackResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1/projects/{project_id}:rollback:\x01*\x12\x92\x01\n\x0b\x41llocateIds\x12\'.google.datastore.v1.AllocateIdsRequest\x1a(.google.datastore.v1.AllocateIdsResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1/projects/{project_id}:allocateIds:\x01*\x12\x8e\x01\n\nReserveIds\x12&.google.datastore.v1.ReserveIdsRequest\x1a\'.google.datastore.v1.ReserveIdsResponse\"/\x82\xd3\xe4\x93\x02)\"$/v1/projects/{project_id}:reserveIds:\x01*B\x85\x01\n\x17\x63om.google.datastore.v1B\x0e\x44\x61tastoreProtoP\x01Z=0.15.0.""" + """Each RPC normalizes the partition IDs of the keys in its input entities, + 
and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def RunQuery(self, request, context): + """Queries for entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def BeginTransaction(self, request, context): + """Begins a new transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ReserveIds(self, request, context): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaDatastoreStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + def Lookup(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Looks up entities by key. + """ + raise NotImplementedError() + Lookup.future = None + def RunQuery(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Queries for entities. + """ + raise NotImplementedError() + RunQuery.future = None + def BeginTransaction(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Begins a new transaction. + """ + raise NotImplementedError() + BeginTransaction.future = None + def Commit(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + raise NotImplementedError() + Commit.future = None + def Rollback(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Rolls back a transaction. + """ + raise NotImplementedError() + Rollback.future = None + def AllocateIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. 
+ """ + raise NotImplementedError() + AllocateIds.future = None + def ReserveIds(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + raise NotImplementedError() + ReserveIds.future = None + + + def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.FromString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.FromString, + } + response_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.SerializeToString, + } + method_implementations = { + ('google.datastore.v1.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds), + ('google.datastore.v1.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction), + ('google.datastore.v1.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit), + ('google.datastore.v1.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup), + ('google.datastore.v1.Datastore', 'ReserveIds'): face_utilities.unary_unary_inline(servicer.ReserveIds), + ('google.datastore.v1.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback), + ('google.datastore.v1.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Commit'): CommitRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackRequest.SerializeToString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryRequest.SerializeToString, + } + response_deserializers = { + ('google.datastore.v1.Datastore', 'AllocateIds'): AllocateIdsResponse.FromString, + ('google.datastore.v1.Datastore', 'BeginTransaction'): BeginTransactionResponse.FromString, + ('google.datastore.v1.Datastore', 'Commit'): CommitResponse.FromString, + ('google.datastore.v1.Datastore', 'Lookup'): LookupResponse.FromString, + ('google.datastore.v1.Datastore', 'ReserveIds'): ReserveIdsResponse.FromString, + ('google.datastore.v1.Datastore', 'Rollback'): RollbackResponse.FromString, + ('google.datastore.v1.Datastore', 'RunQuery'): RunQueryResponse.FromString, + } + cardinalities = { + 'AllocateIds': cardinality.Cardinality.UNARY_UNARY, + 'BeginTransaction': cardinality.Cardinality.UNARY_UNARY, + 'Commit': cardinality.Cardinality.UNARY_UNARY, + 'Lookup': cardinality.Cardinality.UNARY_UNARY, + 'ReserveIds': cardinality.Cardinality.UNARY_UNARY, + 'Rollback': cardinality.Cardinality.UNARY_UNARY, + 'RunQuery': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.datastore.v1.Datastore', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py b/datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py new file mode 100644 index 0000000000000..5209ca6e146de --- /dev/null +++ b/datastore/google/cloud/datastore_v1/proto/datastore_pb2_grpc.py @@ -0,0 +1,163 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.datastore_v1.proto import datastore_pb2 as google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2 + + +class DatastoreStub(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
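+
+        Example (a minimal sketch; the local emulator endpoint shown is
+        hypothetical):
+          >>> import grpc
+          >>> channel = grpc.insecure_channel('localhost:8081')
+          >>> stub = DatastoreStub(channel)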
+ """ + self.Lookup = channel.unary_unary( + '/google.datastore.v1.Datastore/Lookup', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupResponse.FromString, + ) + self.RunQuery = channel.unary_unary( + '/google.datastore.v1.Datastore/RunQuery', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryResponse.FromString, + ) + self.BeginTransaction = channel.unary_unary( + '/google.datastore.v1.Datastore/BeginTransaction', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionResponse.FromString, + ) + self.Commit = channel.unary_unary( + '/google.datastore.v1.Datastore/Commit', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitResponse.FromString, + ) + self.Rollback = channel.unary_unary( + '/google.datastore.v1.Datastore/Rollback', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackResponse.FromString, + ) + self.AllocateIds = channel.unary_unary( + '/google.datastore.v1.Datastore/AllocateIds', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsResponse.FromString, + ) + self.ReserveIds = channel.unary_unary( + '/google.datastore.v1.Datastore/ReserveIds', + request_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsResponse.FromString, + ) + + +class DatastoreServicer(object): + """Each RPC normalizes the partition IDs of the keys in its input entities, + and always returns entities with keys with normalized partition IDs. + This applies to all keys and entities, including those in values, except keys + with both an empty path and an empty or unset partition ID. Normalization of + input keys sets the project ID (if not already set) to the project ID from + the request. + + """ + + def Lookup(self, request, context): + """Looks up entities by key. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunQuery(self, request, context): + """Queries for entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def BeginTransaction(self, request, context): + """Begins a new transaction. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Commit(self, request, context): + """Commits a transaction, optionally creating, deleting or modifying some + entities. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Rollback(self, request, context): + """Rolls back a transaction. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AllocateIds(self, request, context): + """Allocates IDs for the given keys, which is useful for referencing an entity + before it is inserted. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ReserveIds(self, request, context): + """Prevents the supplied keys' IDs from being auto-allocated by Cloud + Datastore. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DatastoreServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Lookup': grpc.unary_unary_rpc_method_handler( + servicer.Lookup, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.LookupResponse.SerializeToString, + ), + 'RunQuery': grpc.unary_unary_rpc_method_handler( + servicer.RunQuery, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RunQueryResponse.SerializeToString, + ), + 'BeginTransaction': grpc.unary_unary_rpc_method_handler( + servicer.BeginTransaction, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.BeginTransactionResponse.SerializeToString, + ), + 'Commit': grpc.unary_unary_rpc_method_handler( + servicer.Commit, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.CommitResponse.SerializeToString, + ), + 'Rollback': grpc.unary_unary_rpc_method_handler( + servicer.Rollback, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.RollbackResponse.SerializeToString, + ), + 'AllocateIds': grpc.unary_unary_rpc_method_handler( + servicer.AllocateIds, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsRequest.FromString, + response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.AllocateIdsResponse.SerializeToString, + ), + 'ReserveIds': grpc.unary_unary_rpc_method_handler( + servicer.ReserveIds, + request_deserializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsRequest.FromString, + 
response_serializer=google_dot_cloud_dot_datastore__v1_dot_proto_dot_datastore__pb2.ReserveIdsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.datastore.v1.Datastore', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/datastore/google/cloud/datastore_v1/proto/entity_pb2.py b/datastore/google/cloud/datastore_v1/proto/entity_pb2.py new file mode 100644 index 0000000000000..9d864d2519d90 --- /dev/null +++ b/datastore/google/cloud/datastore_v1/proto/entity_pb2.py @@ -0,0 +1,661 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/datastore_v1/proto/entity.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/datastore_v1/proto/entity.proto', + package='google.datastore.v1', + syntax='proto3', + serialized_pb=_b('\n,google/cloud/datastore_v1/proto/entity.proto\x12\x13google.datastore.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xb7\x01\n\x03Key\x12\x36\n\x0cpartition_id\x18\x01 \x01(\x0b\x32 .google.datastore.v1.PartitionId\x12\x32\n\x04path\x18\x02 \x03(\x0b\x32$.google.datastore.v1.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"8\n\nArrayValue\x12*\n\x06values\x18\x01 \x03(\x0b\x32\x1a.google.datastore.v1.Value\"\xf1\x03\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\tkey_value\x18\x05 \x01(\x0b\x32\x18.google.datastore.v1.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x33\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1b.google.datastore.v1.EntityH\x00\x12\x36\n\x0b\x61rray_value\x18\t \x01(\x0b\x32\x1f.google.datastore.v1.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xbf\x01\n\x06\x45ntity\x12%\n\x03key\x18\x01 \x01(\x0b\x32\x18.google.datastore.v1.Key\x12?\n\nproperties\x18\x03 \x03(\x0b\x32+.google.datastore.v1.Entity.PropertiesEntry\x1aM\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 
\x01(\x0b\x32\x1a.google.datastore.v1.Value:\x02\x38\x01\x42\x82\x01\n\x17\x63om.google.datastore.v1B\x0b\x45ntityProtoP\x01Z\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32#.google.datastore.v1.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xa9\x01\n\x0f\x43ompositeFilter\x12\x39\n\x02op\x18\x01 \x01(\x0e\x32-.google.datastore.v1.CompositeFilter.Operator\x12,\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x1b.google.datastore.v1.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xc7\x02\n\x0ePropertyFilter\x12\x38\n\x08property\x18\x01 \x01(\x0b\x32&.google.datastore.v1.PropertyReference\x12\x38\n\x02op\x18\x02 \x01(\x0e\x32,.google.datastore.v1.PropertyFilter.Operator\x12)\n\x05value\x18\x03 \x01(\x0b\x32\x1a.google.datastore.v1.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xa5\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12H\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x30.google.datastore.v1.GqlQuery.NamedBindingsEntry\x12\x43\n\x13positional_bindings\x18\x04 \x03(\x0b\x32&.google.datastore.v1.GqlQueryParameter\x1a\\\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.google.datastore.v1.GqlQueryParameter:\x02\x38\x01\"d\n\x11GqlQueryParameter\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1a.google.datastore.v1.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xde\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12H\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32,.google.datastore.v1.EntityResult.ResultType\x12\x39\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32!.google.datastore.v1.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12K\n\x0cmore_results\x18\x05 \x01(\x0e\x32\x35.google.datastore.v1.QueryResultBatch.MoreResultsType\x12\x18\n\x10snapshot_version\x18\x07 \x01(\x03\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42\x81\x01\n\x17\x63om.google.datastore.v1B\nQueryProtoP\x01Z`__. + end_cursor: + An ending point for the query results. Query cursors are + returned in query result batches and `can only be used to + limit the same query `__. + offset: + The number of results to skip. Applies before limit, but after + all other constraints. Optional. Must be >= 0 if specified. + limit: + The maximum number of results to return. Applies after all + other constraints. Optional. Unspecified is interpreted as no + limit. Must be >= 0 if specified. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Query) + )) +_sym_db.RegisterMessage(Query) + +KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict( + DESCRIPTOR = _KINDEXPRESSION, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A representation of a kind. + + + Attributes: + name: + The name of the kind. 
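+
+  Example (a minimal sketch; ``'Task'`` is an illustrative kind name,
+  mirroring the test helpers elsewhere in this change):
+      >>> from google.cloud.datastore_v1.proto import query_pb2
+      >>> kind_pb = query_pb2.KindExpression(name='Task')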
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.KindExpression) + )) +_sym_db.RegisterMessage(KindExpression) + +PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYREFERENCE, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A reference to a property relative to the kind expressions. + + + Attributes: + name: + The name of the property. If name includes "."s, it may be + interpreted as a property name path. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyReference) + )) +_sym_db.RegisterMessage(PropertyReference) + +Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict( + DESCRIPTOR = _PROJECTION, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A representation of a property in a projection. + + + Attributes: + property: + The property to project. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Projection) + )) +_sym_db.RegisterMessage(Projection) + +PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYORDER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """The desired order for a specific property. + + + Attributes: + property: + The property to order by. + direction: + The direction to order by. Defaults to ``ASCENDING``. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyOrder) + )) +_sym_db.RegisterMessage(PropertyOrder) + +Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict( + DESCRIPTOR = _FILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A holder for any type of filter. + + + Attributes: + filter_type: + The type of filter. + composite_filter: + A composite filter. + property_filter: + A filter on a property. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.Filter) + )) +_sym_db.RegisterMessage(Filter) + +CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict( + DESCRIPTOR = _COMPOSITEFILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A filter that merges multiple other filters using the given operator. + + + Attributes: + op: + The operator for combining multiple filters. + filters: + The list of filters to combine. Must contain at least one + filter. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.CompositeFilter) + )) +_sym_db.RegisterMessage(CompositeFilter) + +PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict( + DESCRIPTOR = _PROPERTYFILTER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A filter on a specific property. + + + Attributes: + property: + The property to filter by. + op: + The operator to filter by. + value: + The value to compare the property to. 
+ """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.PropertyFilter) + )) +_sym_db.RegisterMessage(PropertyFilter) + +GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict( + + NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery.NamedBindingsEntry) + )) + , + DESCRIPTOR = _GQLQUERY, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A `GQL + query `__. + + + Attributes: + query_string: + A string of the format described `here `__. + allow_literals: + When false, the query string must not contain any literals and + instead must bind all values. For example, ``SELECT * FROM + Kind WHERE a = 'string literal'`` is not allowed, while + ``SELECT * FROM Kind WHERE a = @value`` is. + named_bindings: + For each non-reserved named binding site in the query string, + there must be a named parameter with that name, but not + necessarily the inverse. Key must match regex ``[A-Za- + z_$][A-Za-z_$0-9]*``, must not match regex ``__.*__``, and + must not be ``""``. + positional_bindings: + Numbered binding site @1 references the first numbered + parameter, effectively using 1-based indexing, rather than the + usual 0. For each binding site numbered i in + ``query_string``, there must be an i-th numbered parameter. + The inverse must also be true. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQuery) + )) +_sym_db.RegisterMessage(GqlQuery) +_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry) + +GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict( + DESCRIPTOR = _GQLQUERYPARAMETER, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A binding parameter for a GQL query. + + + Attributes: + parameter_type: + The type of parameter. + value: + A value parameter. + cursor: + A query cursor. Query cursors are returned in query result + batches. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.GqlQueryParameter) + )) +_sym_db.RegisterMessage(GqlQueryParameter) + +QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict( + DESCRIPTOR = _QUERYRESULTBATCH, + __module__ = 'google.cloud.datastore_v1.proto.query_pb2' + , + __doc__ = """A batch of results produced by a query. + + + Attributes: + skipped_results: + The number of results skipped, typically because of an offset. + skipped_cursor: + A cursor that points to the position after the last skipped + result. Will be set when ``skipped_results`` != 0. + entity_result_type: + The result type for every entity in ``entity_results``. + entity_results: + The results for this batch. + end_cursor: + A cursor that points to the position after the last result in + the batch. + more_results: + The state of the query after the current batch. + snapshot_version: + The version number of the snapshot this batch was returned + from. This applies to the range of results from the query's + ``start_cursor`` (or the beginning of the query if no cursor + was given) to this batch's ``end_cursor`` (not the query's + ``end_cursor``). In a single transaction, subsequent query + result batches for the same query can have a greater snapshot + version number. 
Each batch's snapshot version is valid for all + preceding batches. The value will be zero for eventually + consistent queries. + """, + # @@protoc_insertion_point(class_scope:google.datastore.v1.QueryResultBatch) + )) +_sym_db.RegisterMessage(QueryResultBatch) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.datastore.v1B\nQueryProtoP\001Z= 0.28.0, < 0.29dev', 'google-api-core >= 0.1.1, < 0.2.0dev', - 'google-gax >= 0.15.7, < 0.16dev', - 'gapic-google-cloud-datastore-v1 >= 0.15.0, < 0.16dev', + 'google-auth >= 1.0.2, < 2.0dev', + 'google-gax >= 0.15.15, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'requests >= 2.18.4, < 3.0dev', ] setup( diff --git a/datastore/tests/unit/test__gax.py b/datastore/tests/unit/test__gax.py index f81d709c6b655..9f0896058fe39 100644 --- a/datastore/tests/unit/test__gax.py +++ b/datastore/tests/unit/test__gax.py @@ -95,7 +95,7 @@ def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def test_lookup(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -115,7 +115,7 @@ def test_lookup(self): mock_catch_rendezvous.assert_called_once_with() def test_run_query(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -136,7 +136,7 @@ def test_run_query(self): mock_catch_rendezvous.assert_called_once_with() def test_begin_transaction(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -158,7 +158,7 @@ def test_begin_transaction(self): mock_catch_rendezvous.assert_called_once_with() def test_commit(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -178,7 +178,7 @@ def test_commit(self): mock_catch_rendezvous.assert_called_once_with() def test_rollback(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -199,7 +199,7 @@ def test_rollback(self): mock_catch_rendezvous.assert_called_once_with() def test_allocate_ids(self): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client patch1 = mock.patch.object( datastore_client.DatastoreClient, '__init__', @@ -236,7 +236,7 @@ def _call_fut(self, client): @mock.patch('google.cloud.datastore._gax.make_secure_channel', return_value=mock.sentinel.channel) def test_live_api(self, make_chan, mock_klass): - from google.cloud.gapic.datastore.v1 import datastore_client + from google.cloud.datastore_v1.gapic import datastore_client from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.datastore import __version__ @@ -252,8 +252,9 @@ def test_live_api(self, make_chan, mock_klass): make_chan.assert_called_once_with( mock.sentinel.credentials, DEFAULT_USER_AGENT, host) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, 
lib_name='gccl', - lib_version=__version__) + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) @mock.patch( 'google.cloud.datastore._gax.GAPICDatastoreAPI', @@ -274,5 +275,6 @@ def test_emulator(self, make_chan, mock_klass): make_chan.assert_called_once_with(host) mock_klass.assert_called_once_with( - channel=mock.sentinel.channel, lib_name='gccl', - lib_version=__version__) + channel=mock.sentinel.channel, + client_info=mock.ANY, + ) diff --git a/datastore/tests/unit/test__http.py b/datastore/tests/unit/test__http.py index e840b649cdd16..cceb40419a56b 100644 --- a/datastore/tests/unit/test__http.py +++ b/datastore/tests/unit/test__http.py @@ -91,7 +91,7 @@ def _call_fut(*args, **kwargs): return _rpc(*args, **kwargs) def test_it(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 http = object() project = 'projectOK' @@ -128,7 +128,7 @@ def _make_one(self, *args, **kwargs): @staticmethod def _make_query_pb(kind): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 return query_pb2.Query( kind=[query_pb2.KindExpression(name=kind)], @@ -140,7 +140,7 @@ def test_constructor(self): self.assertIs(ds_api.client, client) def test_lookup_single_key_empty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -170,7 +170,7 @@ def test_lookup_single_key_empty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_eventual(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -201,7 +201,7 @@ def test_lookup_single_key_empty_response_w_eventual(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_empty_response_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'TRANSACTION' @@ -232,8 +232,8 @@ def test_lookup_single_key_empty_response_w_transaction(self): self.assertEqual(request.read_options, read_options) def test_lookup_single_key_nonempty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 project = 'PROJECT' key_pb = _make_key_pb(project) @@ -269,7 +269,7 @@ def test_lookup_single_key_nonempty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_empty_response(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -300,7 +300,7 @@ def test_lookup_multiple_keys_empty_response(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_missing(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -336,7 +336,7 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(request.read_options, read_options) def test_lookup_multiple_keys_w_deferred(self): - 
from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' key_pb1 = _make_key_pb(project) @@ -369,9 +369,9 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(request.read_options, read_options) def test_run_query_w_eventual_no_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -410,9 +410,9 @@ def test_run_query_w_eventual_no_transaction(self): self.assertEqual(request.read_options, read_options) def test_run_query_wo_eventual_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -451,9 +451,9 @@ def test_run_query_wo_eventual_w_transaction(self): self.assertEqual(request.read_options, read_options) def test_run_query_wo_namespace_empty_result(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Nonesuch' @@ -491,9 +491,9 @@ def test_run_query_wo_namespace_empty_result(self): self.assertEqual(request.read_options, read_options) def test_run_query_w_namespace_nonempty_result(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 project = 'PROJECT' kind = 'Kind' @@ -533,7 +533,7 @@ def test_run_query_w_namespace_nonempty_result(self): self.assertEqual(request.query, query_pb) def test_begin_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'TRANSACTION' @@ -561,7 +561,7 @@ def test_begin_transaction(self): self.assertEqual(request.project_id, u'') def test_commit_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = 'PROJECT' @@ -596,7 +596,7 @@ def test_commit_wo_transaction(self): self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) def test_commit_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _new_value_pb project = 'PROJECT' @@ -631,7 +631,7 @@ def test_commit_w_transaction(self): self.assertEqual(request.mode, rq_class.TRANSACTIONAL) def test_rollback_ok(self): - 
from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' transaction = b'xact' @@ -656,7 +656,7 @@ def test_rollback_ok(self): self.assertEqual(request.transaction, transaction) def test_allocate_ids_empty(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() @@ -681,7 +681,7 @@ def test_allocate_ids_empty(self): self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' before_key_pbs = [ diff --git a/datastore/tests/unit/test_batch.py b/datastore/tests/unit/test_batch.py index 01262ae17ba63..9b854141e5534 100644 --- a/datastore/tests/unit/test_batch.py +++ b/datastore/tests/unit/test_batch.py @@ -43,7 +43,7 @@ def test_ctor(self): self.assertEqual(batch._partial_key_entities, []) def test_current(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' client = _Client(project) @@ -219,7 +219,7 @@ def test_rollback_wrong_status(self): self.assertRaises(ValueError, batch.rollback) def test_commit(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' client = _Client(project) @@ -244,7 +244,7 @@ def test_commit_wrong_status(self): self.assertRaises(ValueError, batch.commit) def test_commit_w_partial_key_entities(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' new_id = 1234 @@ -269,7 +269,7 @@ def test_commit_w_partial_key_entities(self): self.assertEqual(entity.key._id, new_id) def test_as_context_mgr_wo_error(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' properties = {'foo': 'bar'} @@ -293,7 +293,7 @@ def test_as_context_mgr_wo_error(self): project, mode, batch.mutations, transaction=None) def test_as_context_mgr_nested(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' properties = {'foo': 'bar'} @@ -381,8 +381,8 @@ def _call_fut(self, commit_response_pb): return _parse_commit_response(commit_response_pb) def test_it(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 index_updates = 1337 keys = [ @@ -434,7 +434,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out @@ -494,8 +494,8 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): def _make_mutation(id_): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() 
key.partition_id.project_id = 'PROJECT' @@ -506,7 +506,7 @@ def _make_mutation(id_): def _make_commit_response(*new_key_ids): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ _make_mutation(key_id) for key_id in new_key_ids] diff --git a/datastore/tests/unit/test_client.py b/datastore/tests/unit/test_client.py index 6477f53c5fa77..51b3e2651531b 100644 --- a/datastore/tests/unit/test_client.py +++ b/datastore/tests/unit/test_client.py @@ -24,7 +24,7 @@ def _make_credentials(): def _make_entity_pb(project, kind, integer_id, name=None, str_val=None): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -338,7 +338,7 @@ def test_get_multi_no_keys(self): self.assertEqual(results, []) def test_get_multi_miss(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key creds = _make_credentials() @@ -352,11 +352,14 @@ def test_get_multi_miss(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key.to_protobuf()], + ) def test_get_multi_miss_w_missing(self): - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key KIND = 'Kind' @@ -386,7 +389,10 @@ def test_get_multi_miss_w_missing(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key_pb], + ) def test_get_multi_w_missing_non_empty(self): from google.cloud.datastore.key import Key @@ -411,7 +417,7 @@ def test_get_multi_w_deferred_non_empty(self): [key], deferred=deferred) def test_get_multi_miss_w_deferred(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key key = Key('Kind', 1234, project=self.PROJECT) @@ -432,11 +438,14 @@ def test_get_multi_miss_w_deferred(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key_pb], + ) def test_get_multi_w_deferred_from_backend_but_not_passed(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -479,12 +488,18 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self): self.assertEqual(ds_api.lookup.call_count, 2) read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_any_call( - self.PROJECT, read_options, [key2_pb]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key2_pb], + ) ds_api.lookup.assert_any_call( - self.PROJECT, read_options, [key1_pb, key2_pb]) + project_id=self.PROJECT, + 
read_options=read_options, + keys=[key1_pb, key2_pb], + ) def test_get_multi_hit(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key kind = 'Kind' @@ -514,10 +529,13 @@ def test_get_multi_hit(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + keys=[key.to_protobuf()], + project_id=self.PROJECT, + read_options=read_options, + ) def test_get_multi_hit_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key txn_id = b'123' @@ -550,10 +568,13 @@ def test_get_multi_hit_w_transaction(self): read_options = datastore_pb2.ReadOptions(transaction=txn_id) ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, [key.to_protobuf()]) + project_id=self.PROJECT, + keys=[key.to_protobuf()], + read_options=read_options, + ) def test_get_multi_hit_multiple_keys_same_project(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.key import Key kind = 'Kind' @@ -584,8 +605,10 @@ def test_get_multi_hit_multiple_keys_same_project(self): read_options = datastore_pb2.ReadOptions() ds_api.lookup.assert_called_once_with( - self.PROJECT, read_options, - [key1.to_protobuf(), key2.to_protobuf()]) + project_id=self.PROJECT, + read_options=read_options, + keys=[key1.to_protobuf(), key2.to_protobuf()], + ) def test_get_multi_hit_multiple_keys_different_project(self): from google.cloud.datastore.key import Key @@ -668,7 +691,7 @@ def test_put_multi_w_single_empty_entity(self): self.assertRaises(ValueError, client.put_multi, Entity()) def test_put_multi_no_batch_w_partial_key(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 from google.cloud.datastore.helpers import _property_tuples entity = _Entity(foo=u'bar') @@ -750,7 +773,7 @@ def test_delete_multi_no_keys(self): client._datastore_api_internal.commit.assert_not_called() def test_delete_multi_no_batch(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 key = _Key(self.PROJECT) @@ -1022,7 +1045,7 @@ def test_eventual_w_transaction(self): self._call_fut(True, b'123') def test_eventual_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 read_options = self._call_fut(True, None) expected = datastore_pb2.ReadOptions( @@ -1030,7 +1053,7 @@ def test_eventual_wo_transaction(self): self.assertEqual(read_options, expected) def test_default_w_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 txn_id = b'123abc-easy-as' read_options = self._call_fut(False, txn_id) @@ -1038,7 +1061,7 @@ def test_default_w_transaction(self): self.assertEqual(read_options, expected) def test_default_wo_transaction(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 read_options = self._call_fut(False, None) expected = datastore_pb2.ReadOptions() @@ -1103,7 +1126,7 @@ def is_partial(self): return self._id is None def to_protobuf(self): - from 
google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = self._key = entity_pb2.Key() # Don't assign it, because it will just get ripped out @@ -1153,7 +1176,7 @@ def _mutated_pb(test_case, mutation_pb_list, mutation_type): def _make_key(id_): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() elem = key.path.add() @@ -1162,7 +1185,7 @@ def _make_key(id_): def _make_commit_response(*keys): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ datastore_pb2.MutationResult(key=key) for key in keys] diff --git a/datastore/tests/unit/test_helpers.py b/datastore/tests/unit/test_helpers.py index be4855d5e48c0..5e91de0196f46 100644 --- a/datastore/tests/unit/test_helpers.py +++ b/datastore/tests/unit/test_helpers.py @@ -23,7 +23,7 @@ def _call_fut(self, entity_pb, name): return _new_value_pb(entity_pb, name) def test_it(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 entity_pb = entity_pb2.Entity() name = 'foo' @@ -43,7 +43,7 @@ def _call_fut(self, entity_pb): def test_it(self): import types - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -66,7 +66,7 @@ def _call_fut(self, val): return entity_from_protobuf(val) def test_it(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -112,7 +112,7 @@ def test_it(self): self.assertEqual(key.id, _ID) def test_mismatched_value_indexed(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb _PROJECT = 'PROJECT' @@ -136,7 +136,7 @@ def test_mismatched_value_indexed(self): self._call_fut(entity_pb) def test_entity_no_key(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 entity_pb = entity_pb2.Entity() entity = self._call_fut(entity_pb) @@ -145,7 +145,7 @@ def test_entity_no_key(self): self.assertEqual(dict(entity), {}) def test_entity_with_meaning(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb entity_pb = entity_pb2.Entity() @@ -160,7 +160,7 @@ def test_entity_with_meaning(self): self.assertEqual(entity._meanings, {name: (meaning, val)}) def test_nested_entity_no_key(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb PROJECT = 'FOO' @@ -218,7 +218,7 @@ def _compare_entity_proto(self, entity_pb1, entity_pb2): self.assertEqual(val1, val2) def test_empty(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -226,7 +226,7 @@ def test_empty(self): self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_key_only(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from 
google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.key import Key @@ -245,7 +245,7 @@ def test_key_only(self): self._compare_entity_proto(entity_pb, expected_pb) def test_simple_fields(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -265,7 +265,7 @@ def test_simple_fields(self): self._compare_entity_proto(entity_pb, expected_pb) def test_with_empty_list(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -275,7 +275,7 @@ def test_with_empty_list(self): self._compare_entity_proto(entity_pb, entity_pb2.Entity()) def test_inverts_to_protobuf(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import _new_value_pb from google.cloud.datastore.helpers import entity_from_protobuf @@ -328,7 +328,7 @@ def test_inverts_to_protobuf(self): self._compare_entity_proto(original_pb, new_pb) def test_meaning_with_change(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -346,7 +346,7 @@ def test_meaning_with_change(self): self._compare_entity_proto(entity_pb, expected_pb) def test_variable_meanings(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -372,7 +372,7 @@ def test_variable_meanings(self): self._compare_entity_proto(entity_pb, expected_pb) def test_dict_to_entity(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -395,7 +395,7 @@ def test_dict_to_entity(self): self.assertEqual(entity_pb, expected_pb) def test_dict_to_entity_recursive(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity entity = Entity() @@ -451,7 +451,7 @@ def _call_fut(self, val): return key_from_protobuf(val) def _makePB(self, project=None, namespace=None, path=()): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Key() if project is not None: @@ -636,7 +636,7 @@ def _call_fut(self, pb): return _get_value_from_value_pb(pb) def _makePB(self, attr_name, value): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() setattr(pb, attr_name, value) @@ -646,7 +646,7 @@ def test_datetime(self): import calendar import datetime from google.cloud._helpers import UTC - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 micros = 4375 utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC) @@ -656,7 +656,7 @@ def test_datetime(self): self.assertEqual(self._call_fut(pb), utc) def test_key(self): - from 
google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.key import Key pb = entity_pb2.Value() @@ -686,7 +686,7 @@ def test_unicode(self): self.assertEqual(self._call_fut(pb), u'str') def test_entity(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.entity import Entity from google.cloud.datastore.helpers import _new_value_pb @@ -702,7 +702,7 @@ def test_entity(self): self.assertEqual(entity['foo'], 'Foo') def test_array(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() array_pb = pb.array_value.values @@ -715,7 +715,7 @@ def test_array(self): def test_geo_point(self): from google.type import latlng_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 from google.cloud.datastore.helpers import GeoPoint lat = -3.14 @@ -729,14 +729,14 @@ def test_geo_point(self): def test_null(self): from google.protobuf import struct_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE) result = self._call_fut(pb) self.assertIsNone(result) def test_unknown(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 pb = entity_pb2.Value() with self.assertRaises(ValueError): @@ -751,7 +751,7 @@ def _call_fut(self, value_pb, val): return _set_protobuf_value(value_pb, val) def _makePB(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 return entity_pb2.Value() def test_datetime(self): @@ -891,14 +891,14 @@ def _call_fut(self, *args, **kwargs): return _get_meaning(*args, **kwargs) def test_no_meaning(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() result = self._call_fut(value_pb) self.assertIsNone(result) def test_single(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() value_pb.meaning = meaning = 22 @@ -907,7 +907,7 @@ def test_single(self): self.assertEqual(meaning, result) def test_empty_array_value(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() value_pb.array_value.values.add() @@ -917,7 +917,7 @@ def test_empty_array_value(self): self.assertEqual(None, result) def test_array_value(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() meaning = 9 @@ -932,7 +932,7 @@ def test_array_value(self): self.assertEqual(meaning, result) def test_array_value_multiple_meanings(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() meaning1 = 9 @@ -949,7 +949,7 @@ def test_array_value_multiple_meanings(self): self.assertEqual(result, [meaning1, meaning2]) def test_array_value_meaning_partially_unset(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 value_pb = entity_pb2.Value() 
meaning1 = 9 diff --git a/datastore/tests/unit/test_key.py b/datastore/tests/unit/test_key.py index 3f82412e27cd6..e95d756013cce 100644 --- a/datastore/tests/unit/test_key.py +++ b/datastore/tests/unit/test_key.py @@ -332,7 +332,7 @@ def test_completed_key_on_complete(self): self.assertRaises(ValueError, key.completed_key, 5678) def test_to_protobuf_defaults(self): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 _KIND = 'KIND' key = self._make_one(_KIND, project=self._DEFAULT_PROJECT) diff --git a/datastore/tests/unit/test_query.py b/datastore/tests/unit/test_query.py index 111a2ceed0bf9..791031c1444e5 100644 --- a/datastore/tests/unit/test_query.py +++ b/datastore/tests/unit/test_query.py @@ -395,7 +395,7 @@ def test_constructor_explicit(self): self.assertTrue(iterator._more_results) def test__build_protobuf_empty(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query client = _Client(None) @@ -407,7 +407,7 @@ def test__build_protobuf_empty(self): self.assertEqual(pb, expected_pb) def test__build_protobuf_all_values(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query client = _Client(None) @@ -435,7 +435,7 @@ def test__build_protobuf_all_values(self): self.assertEqual(pb, expected_pb) def test__process_query_results(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -458,7 +458,7 @@ def test__process_query_results(self): self.assertTrue(iterator._more_results) def test__process_query_results_done(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 iterator = self._make_one(None, None, end_cursor='abcd') @@ -489,9 +489,9 @@ def test__process_query_results_bad_enum(self): def _next_page_helper(self, txn_id=None): from google.api_core import page_iterator - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import entity_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 + from google.cloud.datastore_v1.proto import query_pb2 from google.cloud.datastore.query import Query more_enum = query_pb2.QueryResultBatch.NOT_FINISHED @@ -568,7 +568,7 @@ def _call_fut(self, query): return _pb_from_query(query) def test_empty(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 pb = self._call_fut(_Query()) self.assertEqual(list(pb.projection), []) @@ -596,7 +596,7 @@ def test_kind(self): def test_ancestor(self): from google.cloud.datastore.key import Key - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 ancestor = Key('Ancestor', 123, project='PROJECT') pb = self._call_fut(_Query(ancestor=ancestor)) @@ -609,7 +609,7 @@ def test_ancestor(self): self.assertEqual(pfilter.value.key_value, ancestor_pb) def test_filter(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 query = _Query(filters=[('name', '=', u'John')]) query.OPERATORS = { @@ -625,7 +625,7 @@ def 
test_filter(self): def test_filter_key(self): from google.cloud.datastore.key import Key - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 key = Key('Kind', 123, project='PROJECT') query = _Query(filters=[('__key__', '=', key)]) @@ -642,7 +642,7 @@ def test_filter_key(self): self.assertEqual(pfilter.value.key_value, key_pb) def test_order(self): - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import query_pb2 pb = self._call_fut(_Query(order=['a', '-b', 'c'])) self.assertEqual([item.property.name for item in pb.order], @@ -696,7 +696,7 @@ def current_transaction(self): def _make_entity(kind, id_, project): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project @@ -708,8 +708,8 @@ def _make_entity(kind, id_, project): def _make_query_response( entity_pbs, cursor_as_bytes, more_results_enum, skipped_results): - from google.cloud.proto.datastore.v1 import datastore_pb2 - from google.cloud.proto.datastore.v1 import query_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 + from google.cloud.datastore_v1.proto import query_pb2 return datastore_pb2.RunQueryResponse( batch=query_pb2.QueryResultBatch( diff --git a/datastore/tests/unit/test_transaction.py b/datastore/tests/unit/test_transaction.py index 5479fbf808123..2c72f01dc34aa 100644 --- a/datastore/tests/unit/test_transaction.py +++ b/datastore/tests/unit/test_transaction.py @@ -40,7 +40,7 @@ def test_ctor_defaults(self): self.assertEqual(len(xact._partial_key_entities), 0) def test_current(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 678 @@ -130,7 +130,7 @@ def test_rollback(self): ds_api.begin_transaction.assert_called_once_with(project) def test_commit_no_partial_keys(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 1002930 @@ -147,7 +147,7 @@ def test_commit_no_partial_keys(self): ds_api.begin_transaction.assert_called_once_with(project) def test_commit_w_partial_keys(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' kind = 'KIND' @@ -170,7 +170,7 @@ def test_commit_w_partial_keys(self): ds_api.begin_transaction.assert_called_once_with(project) def test_context_manager_no_raise(self): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 project = 'PROJECT' id_ = 912830 @@ -214,7 +214,7 @@ class Foo(Exception): def _make_key(kind, id_, project): - from google.cloud.proto.datastore.v1 import entity_pb2 + from google.cloud.datastore_v1.proto import entity_pb2 key = entity_pb2.Key() key.partition_id.project_id = project @@ -271,7 +271,7 @@ def __exit__(self, *args): def _make_commit_response(*keys): - from google.cloud.proto.datastore.v1 import datastore_pb2 + from google.cloud.datastore_v1.proto import datastore_pb2 mutation_results = [ datastore_pb2.MutationResult(key=key) for key in keys] From 10ef028a9fd627f40f376757e738ad8b24b4755d Mon Sep 17 00:00:00 2001 From: Evawere Ogbe Date: Wed, 8 Nov 2017 15:37:42 -0800 Subject: [PATCH 5/5] api_core: Add routing header (#4336) * api_core: Add routing header * Fix linting * Review fixes * 
More review fixes * Documentation fixes --- api_core/google/api_core/gapic_v1/__init__.py | 2 + .../api_core/gapic_v1/routing_header.py | 53 +++++++++++++++++++ .../tests/unit/gapic/test_routing_header.py | 29 ++++++++++ 3 files changed, 84 insertions(+) create mode 100644 api_core/google/api_core/gapic_v1/routing_header.py create mode 100644 api_core/tests/unit/gapic/test_routing_header.py diff --git a/api_core/google/api_core/gapic_v1/__init__.py b/api_core/google/api_core/gapic_v1/__init__.py index fbf674ba3b778..e26a499381566 100644 --- a/api_core/google/api_core/gapic_v1/__init__.py +++ b/api_core/google/api_core/gapic_v1/__init__.py @@ -14,8 +14,10 @@ from google.api_core.gapic_v1 import config from google.api_core.gapic_v1 import method +from google.api_core.gapic_v1 import routing_header __all__ = [ 'config', 'method', + 'routing_header', ] diff --git a/api_core/google/api_core/gapic_v1/routing_header.py b/api_core/google/api_core/gapic_v1/routing_header.py new file mode 100644 index 0000000000000..a9626ee57b752 --- /dev/null +++ b/api_core/google/api_core/gapic_v1/routing_header.py @@ -0,0 +1,53 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for constructing routing headers. + +These headers are used by Google infrastructure to determine how to route +requests, especially for services that are regional. + +Generally, these headers are specified as gRPC metadata. +""" + +from six.moves.urllib.parse import urlencode + +ROUTING_METADATA_KEY = 'x-goog-header-params' + + +def to_routing_header(params): + """Returns a routing header string for the given request parameters. + + Args: + params (Mapping[str, Any]): A mapping (or sequence of key/value + pairs) holding the request parameters used for routing. + + Returns: + str: The routing header string. + """ + return urlencode(params) + + +def to_grpc_metadata(params): + """Returns the gRPC metadata containing the routing headers for the given + request parameters. + + Args: + params (Mapping[str, Any]): A mapping (or sequence of key/value + pairs) holding the request parameters used for routing. + + Returns: + Tuple[str, str]: The gRPC metadata containing the routing header key + and value. + """ + return (ROUTING_METADATA_KEY, to_routing_header(params)) diff --git a/api_core/tests/unit/gapic/test_routing_header.py b/api_core/tests/unit/gapic/test_routing_header.py new file mode 100644 index 0000000000000..d3a4bc35f7a2a --- /dev/null +++ b/api_core/tests/unit/gapic/test_routing_header.py @@ -0,0 +1,29 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.api_core.gapic_v1 import routing_header + + +def test_to_routing_header(): + params = [('name', 'meep'), ('book.read', '1')] + value = routing_header.to_routing_header(params) + assert value == "name=meep&book.read=1" + + +def test_to_grpc_metadata(): + params = [('name', 'meep'), ('book.read', '1')] + metadata = routing_header.to_grpc_metadata(params) + assert metadata == ( + routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
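

For completeness, a minimal usage sketch for the new helpers; the `stub.Lookup` call at the end is hypothetical and stands in for any generated GAPIC method that accepts `metadata`:

    from google.api_core.gapic_v1 import routing_header

    # URL-encode the routing parameters into the single metadata pair
    # ('x-goog-header-params', 'project_id=my-project&database_id=my-db').
    params = [('project_id', 'my-project'), ('database_id', 'my-db')]
    metadata = [routing_header.to_grpc_metadata(params)]

    # A generated client would forward this as gRPC call metadata, e.g.:
    # response = stub.Lookup(request, metadata=metadata)

Passing the parameters as a sequence of key/value pairs (rather than a dict) keeps their order stable in the encoded header.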
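
The datastore changes earlier in the series are mechanical by comparison: the proto modules move from `google.cloud.proto.datastore.v1` to the regenerated `google.cloud.datastore_v1.proto` package, and GAPIC calls such as `lookup` are now asserted with keyword arguments. A sketch of the import migration, reusing the key construction from the test helpers above (the project/kind/id values are illustrative):

    # Old import path (removed in this series):
    #     from google.cloud.proto.datastore.v1 import entity_pb2
    # New import path:
    from google.cloud.datastore_v1.proto import entity_pb2

    key = entity_pb2.Key()
    key.partition_id.project_id = 'PROJECT'
    elem = key.path.add()
    elem.kind = 'Kind'
    elem.id = 1234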