From 498dba26a7c1a1cb710a92c0167272ff5c0eef27 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 19 Sep 2023 12:13:53 -0400 Subject: [PATCH 01/13] docs: Minor formatting (#1006) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/database_admin/async_client.py | 14 +++++++------- .../services/database_admin/client.py | 14 +++++++------- .../services/database_admin/transports/base.py | 1 - .../services/database_admin/transports/grpc.py | 1 - .../database_admin/transports/grpc_asyncio.py | 1 - .../services/database_admin/transports/rest.py | 3 +-- .../types/spanner_database_admin.py | 1 + .../services/instance_admin/async_client.py | 14 ++++++++------ .../services/instance_admin/client.py | 14 ++++++++------ .../services/instance_admin/transports/grpc.py | 2 ++ .../instance_admin/transports/grpc_asyncio.py | 2 ++ .../services/instance_admin/transports/rest.py | 4 +++- .../spanner_v1/services/spanner/async_client.py | 2 ++ google/cloud/spanner_v1/services/spanner/client.py | 2 ++ .../spanner_v1/services/spanner/transports/grpc.py | 2 ++ .../services/spanner/transports/grpc_asyncio.py | 2 ++ .../spanner_v1/services/spanner/transports/rest.py | 1 + google/cloud/spanner_v1/types/spanner.py | 3 +++ google/cloud/spanner_v1/types/transaction.py | 3 +++ ..._metadata_google.spanner.admin.database.v1.json | 2 +- ..._metadata_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- .../test_database_admin.py | 2 +- .../test_instance_admin.py | 2 +- 24 files changed, 59 insertions(+), 37 deletions(-) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py index 4cd1d4756a..8da5ebb260 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -51,7 +51,7 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1257,8 +1257,8 @@ async def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. 
This corresponds to the ``resource`` field @@ -1400,8 +1400,8 @@ async def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1555,8 +1555,8 @@ async def sample_test_iam_permissions(): The request object. Request message for ``TestIamPermissions`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py index b6f2d1f1e7..39904ec05f 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -55,7 +55,7 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1523,8 +1523,8 @@ def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1663,8 +1663,8 @@ def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -1805,8 +1805,8 @@ def sample_test_iam_permissions(): The request object. Request message for ``TestIamPermissions`` method. resource (str): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. 
This corresponds to the ``resource`` field diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index 5f800d5063..2d2b2b5ad9 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -32,7 +32,6 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index a42258e96c..d518b455fa 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -30,7 +30,6 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index badd1058a1..ddf3d0eb53 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -30,7 +30,6 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py index bd35307fcc..5aaedde91c 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -28,7 +28,6 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.longrunning import operations_pb2 from requests import __version__ as requests_version import dataclasses import re @@ -46,8 +45,8 @@ from google.cloud.spanner_admin_database_v1.types import spanner_database_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import ( DatabaseAdminTransport, diff --git 
a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py index 8ba67a4480..92f6f58613 100644 --- a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -131,6 +131,7 @@ class Database(proto.Message): the encryption information for the database, such as encryption state and the Cloud KMS key versions that are in use. + For databases that are using Google default or other types of encryption, this field is empty. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py index f6dbc4e73d..3c35c25c5d 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -58,10 +58,12 @@ class InstanceAdminAsyncClient: """Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, delete, modify and list instances. Instances are dedicated Cloud Spanner serving and storage resources to be used by Cloud Spanner databases. + Each instance has a "configuration", which dictates where the serving resources for the Cloud Spanner instance are located (e.g., US-central, Europe). Configurations are created by Google @@ -1879,8 +1881,8 @@ async def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -2018,8 +2020,8 @@ async def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -2170,8 +2172,8 @@ async def sample_test_iam_permissions(): The request object. Request message for ``TestIamPermissions`` method. resource (:class:`str`): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py index dd94cacafb..cab796f644 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -98,10 +98,12 @@ def get_transport_class( class InstanceAdminClient(metaclass=InstanceAdminClientMeta): """Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, delete, modify and list instances. Instances are dedicated Cloud Spanner serving and storage resources to be used by Cloud Spanner databases. + Each instance has a "configuration", which dictates where the serving resources for the Cloud Spanner instance are located (e.g., US-central, Europe). 
Configurations are created by Google @@ -2073,8 +2075,8 @@ def sample_set_iam_policy(): The request object. Request message for ``SetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being specified. - See the operation documentation for the + policy is being specified. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -2209,8 +2211,8 @@ def sample_get_iam_policy(): The request object. Request message for ``GetIamPolicy`` method. resource (str): REQUIRED: The resource for which the - policy is being requested. - See the operation documentation for the + policy is being requested. See the + operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field @@ -2348,8 +2350,8 @@ def sample_test_iam_permissions(): The request object. Request message for ``TestIamPermissions`` method. resource (str): REQUIRED: The resource for which the - policy detail is being requested. - See the operation documentation for the + policy detail is being requested. See + the operation documentation for the appropriate value for this field. This corresponds to the ``resource`` field diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index 4e5be0b229..03fef980e6 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -37,10 +37,12 @@ class InstanceAdminGrpcTransport(InstanceAdminTransport): """gRPC backend transport for InstanceAdmin. Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, delete, modify and list instances. Instances are dedicated Cloud Spanner serving and storage resources to be used by Cloud Spanner databases. + Each instance has a "configuration", which dictates where the serving resources for the Cloud Spanner instance are located (e.g., US-central, Europe). Configurations are created by Google diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index b04bc2543b..a5ff6d1635 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -38,10 +38,12 @@ class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): """gRPC AsyncIO backend transport for InstanceAdmin. Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, delete, modify and list instances. Instances are dedicated Cloud Spanner serving and storage resources to be used by Cloud Spanner databases. + Each instance has a "configuration", which dictates where the serving resources for the Cloud Spanner instance are located (e.g., US-central, Europe). 
Configurations are created by Google diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py index c743fa011d..2ba6d65087 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -43,8 +43,8 @@ from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import ( InstanceAdminTransport, @@ -505,10 +505,12 @@ class InstanceAdminRestTransport(InstanceAdminTransport): """REST backend transport for InstanceAdmin. Cloud Spanner Instance Admin API + The Cloud Spanner Instance Admin API can be used to create, delete, modify and list instances. Instances are dedicated Cloud Spanner serving and storage resources to be used by Cloud Spanner databases. + Each instance has a "configuration", which dictates where the serving resources for the Cloud Spanner instance are located (e.g., US-central, Europe). Configurations are created by Google diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index a394467ffd..977970ce7e 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -60,6 +60,7 @@ class SpannerAsyncClient: """Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute transactions on data stored in Cloud Spanner databases. """ @@ -357,6 +358,7 @@ async def batch_create_sessions( metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.BatchCreateSessionsResponse: r"""Creates multiple new sessions. + This API can be used to initialize a session cache on the clients. See https://goo.gl/TgSFN2 for best practices on session cache management. diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index f3130c56f6..59dc4f222c 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -99,6 +99,7 @@ def get_transport_class( class SpannerClient(metaclass=SpannerClientMeta): """Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute transactions on data stored in Cloud Spanner databases. """ @@ -604,6 +605,7 @@ def batch_create_sessions( metadata: Sequence[Tuple[str, str]] = (), ) -> spanner.BatchCreateSessionsResponse: r"""Creates multiple new sessions. + This API can be used to initialize a session cache on the clients. See https://goo.gl/TgSFN2 for best practices on session cache management. diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index e54453671b..7236f0ed27 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -36,6 +36,7 @@ class SpannerGrpcTransport(SpannerTransport): """gRPC backend transport for Spanner. 
Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute transactions on data stored in Cloud Spanner databases. @@ -288,6 +289,7 @@ def batch_create_sessions( r"""Return a callable for the batch create sessions method over gRPC. Creates multiple new sessions. + This API can be used to initialize a session cache on the clients. See https://goo.gl/TgSFN2 for best practices on session cache management. diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 78548aa2f8..62a975c319 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -37,6 +37,7 @@ class SpannerGrpcAsyncIOTransport(SpannerTransport): """gRPC AsyncIO backend transport for Spanner. Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute transactions on data stored in Cloud Spanner databases. @@ -292,6 +293,7 @@ def batch_create_sessions( r"""Return a callable for the batch create sessions method over gRPC. Creates multiple new sessions. + This API can be used to initialize a session cache on the clients. See https://goo.gl/TgSFN2 for best practices on session cache management. diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest.py b/google/cloud/spanner_v1/services/spanner/transports/rest.py index 83abd878df..d7157886a5 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ b/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -493,6 +493,7 @@ class SpannerRestTransport(SpannerTransport): """REST backend transport for Spanner. Cloud Spanner API + The Cloud Spanner API can be used to manage sessions and execute transactions on data stored in Cloud Spanner databases. diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index b69e61012e..310cf8e31f 100644 --- a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -390,6 +390,7 @@ class ExecuteSqlRequest(proto.Message): should be performed. transaction (google.cloud.spanner_v1.types.TransactionSelector): The transaction to use. + For queries, if none is provided, the default is a temporary read-only transaction with strong concurrency. @@ -399,6 +400,7 @@ class ExecuteSqlRequest(proto.Message): single-use transactions are not supported. The caller must either supply an existing transaction ID or begin a new transaction. + Partitioned DML requires an existing Partitioned DML transaction ID. sql (str): @@ -469,6 +471,7 @@ class ExecuteSqlRequest(proto.Message): sequence number, the transaction may be aborted. Replays of previously handled requests will yield the same response as the first execution. + Required for DML statements. Ignored for queries. query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): diff --git a/google/cloud/spanner_v1/types/transaction.py b/google/cloud/spanner_v1/types/transaction.py index d07b2f73c4..57761569d1 100644 --- a/google/cloud/spanner_v1/types/transaction.py +++ b/google/cloud/spanner_v1/types/transaction.py @@ -417,13 +417,16 @@ class ReadLockMode(proto.Enum): Values: READ_LOCK_MODE_UNSPECIFIED (0): Default value. + If the value is not specified, the pessimistic read lock is used. PESSIMISTIC (1): Pessimistic lock mode. + Read locks are acquired immediately on read. OPTIMISTIC (2): Optimistic lock mode. 
+ Locks for reads within the transaction are not acquired on read. Instead the locks are acquired on a commit to validate that read/queried data diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index 0ede9fccff..11932ae5e8 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.40.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 76f704e8fb..9572d4d727 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.40.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index a645b19356..a8e8be3ae3 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.40.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py index 6f5ec35284..48d5447d37 100644 --- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -63,7 +63,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 29c6a1621e..7dbdb8a7f5 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -60,7 +60,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore From a2f87b9d9591562877696526634f0c7c4dd822dd Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sun, 8 Oct 2023 10:21:59 -0400 Subject: [PATCH 02/13] fix: require google-cloud-core >= 1.4.4 (#1015) --- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/setup.py b/setup.py index 7f72131638..1738eed2ea 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-cloud-core >= 1.4.1, < 3.0dev", + "google-cloud-core >= 1.4.4, < 3.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", "proto-plus >= 1.22.0, <2.0.0dev", "sqlparse >= 0.4.4", diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index cddc7be6e5..165814fd90 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -google-cloud-core==1.4.1 +google-cloud-core==1.4.4 grpc-google-iam-v1==0.12.4 libcst==0.2.5 proto-plus==1.22.0 From a9566ed425aa6ed57e6c0f50938ae09a9e555875 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 09:02:31 -0400 Subject: [PATCH 03/13] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#1016) Source-Link: https://github.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- .gitignore | 1 + .kokoro/requirements.txt | 49 ++++++++++++++++++++------------------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a3da1b0d4c..a9bdb1b7ac 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/.gitignore b/.gitignore index b4243ced74..d083ea1ddc 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 029bd342de..96d593c8c8 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + 
--hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From d0e4ffccea071feaa2ca012a0e3f60a945ed1a13 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 09:03:12 -0400 Subject: [PATCH 04/13] feat: add BatchWrite API (#1011) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add BatchWrite API PiperOrigin-RevId: 567412157 Source-Link: https://github.com/googleapis/googleapis/commit/64fd42cf49523091f790e687a2e4036eea519e64 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9e53103ff3c06af94e583af7baa3c7fcafe78322 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWU1MzEwM2ZmM2MwNmFmOTRlNTgzYWY3YmFhM2M3ZmNhZmU3ODMyMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- google/cloud/spanner_v1/gapic_metadata.json | 15 + .../services/spanner/async_client.py | 137 ++++ .../spanner_v1/services/spanner/client.py | 137 ++++ .../services/spanner/transports/base.py | 14 + .../services/spanner/transports/grpc.py | 44 ++ .../spanner/transports/grpc_asyncio.py | 44 ++ .../services/spanner/transports/rest.py | 132 ++++ google/cloud/spanner_v1/types/__init__.py | 4 + google/cloud/spanner_v1/types/spanner.py | 81 +++ .../snippet_metadata_google.spanner.v1.json | 169 +++++ ..._v1_generated_spanner_batch_write_async.py | 57 ++ ...r_v1_generated_spanner_batch_write_sync.py | 57 ++ scripts/fixup_spanner_v1_keywords.py | 1 + tests/unit/gapic/spanner_v1/test_spanner.py | 599 ++++++++++++++++++ 14 files changed, 1491 insertions(+) create mode 100644 samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py create mode 100644 samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py diff --git a/google/cloud/spanner_v1/gapic_metadata.json 
b/google/cloud/spanner_v1/gapic_metadata.json index ea51736a55..f5957c633a 100644 --- a/google/cloud/spanner_v1/gapic_metadata.json +++ b/google/cloud/spanner_v1/gapic_metadata.json @@ -15,6 +15,11 @@ "batch_create_sessions" ] }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, "BeginTransaction": { "methods": [ "begin_transaction" @@ -95,6 +100,11 @@ "batch_create_sessions" ] }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, "BeginTransaction": { "methods": [ "begin_transaction" @@ -175,6 +185,11 @@ "batch_create_sessions" ] }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, "BeginTransaction": { "methods": [ "begin_transaction" diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index 977970ce7e..7c2e950793 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -1973,6 +1973,143 @@ async def sample_partition_read(): # Done; return the response. return response + def batch_write( + self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[ + MutableSequence[spanner.BatchWriteRequest.MutationGroup] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[spanner.BatchWriteResponse]]: + r"""Batches the supplied mutation groups in a collection + of efficient transactions. All mutations in a group are + committed atomically. However, mutations across groups + can be committed non-atomically in an unspecified order + and thus, they must be independent of each other. + Partial failure is possible, i.e., some groups may have + been committed successfully, while some may have failed. + The results of individual batches are streamed into the + response as the batches are applied. + + BatchWrite requests are not replay protected, meaning + that each mutation group may be applied more than once. + Replays of non-idempotent mutations may have undesirable + effects. For example, replays of an insert mutation may + produce an already exists error or if you use generated + or commit timestamp-based keys, it may result in + additional rows being added to the mutation's table. We + recommend structuring your mutation groups to be + idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = await client.batch_write(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (:class:`str`): + Required. The session in which the + batch request is to be run. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (:class:`MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]`): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, mutation_groups]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = spanner.BatchWriteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups: + request.mutation_groups.extend(mutation_groups) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_write, + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "SpannerAsyncClient": return self diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index 59dc4f222c..03907a1b0b 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -2119,6 +2119,143 @@ def sample_partition_read(): # Done; return the response. 
return response + def batch_write( + self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[ + MutableSequence[spanner.BatchWriteRequest.MutationGroup] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[spanner.BatchWriteResponse]: + r"""Batches the supplied mutation groups in a collection + of efficient transactions. All mutations in a group are + committed atomically. However, mutations across groups + can be committed non-atomically in an unspecified order + and thus, they must be independent of each other. + Partial failure is possible, i.e., some groups may have + been committed successfully, while some may have failed. + The results of individual batches are streamed into the + response as the batches are applied. + + BatchWrite requests are not replay protected, meaning + that each mutation group may be applied more than once. + Replays of non-idempotent mutations may have undesirable + effects. For example, replays of an insert mutation may + produce an already exists error or if you use generated + or commit timestamp-based keys, it may result in + additional rows being added to the mutation's table. We + recommend structuring your mutation groups to be + idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = client.batch_write(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (str): + Required. The session in which the + batch request is to be run. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([session, mutation_groups]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a spanner.BatchWriteRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, spanner.BatchWriteRequest): + request = spanner.BatchWriteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups is not None: + request.mutation_groups = mutation_groups + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("session", request.session),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "SpannerClient": return self diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index 668191c5f2..27006d8fbc 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -322,6 +322,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.batch_write: gapic_v1.method.wrap_method( + self.batch_write, + default_timeout=3600.0, + client_info=client_info, + ), } def close(self): @@ -473,6 +478,15 @@ def partition_read( ]: raise NotImplementedError() + @property + def batch_write( + self, + ) -> Callable[ + [spanner.BatchWriteRequest], + Union[spanner.BatchWriteResponse, Awaitable[spanner.BatchWriteResponse]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index 7236f0ed27..86d9ba4133 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -755,6 +755,50 @@ def partition_read( ) return self._stubs["partition_read"] + @property + def batch_write( + self, + ) -> Callable[[spanner.BatchWriteRequest], spanner.BatchWriteResponse]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection + of efficient transactions. All mutations in a group are + committed atomically. However, mutations across groups + can be committed non-atomically in an unspecified order + and thus, they must be independent of each other. + Partial failure is possible, i.e., some groups may have + been committed successfully, while some may have failed. + The results of individual batches are streamed into the + response as the batches are applied. + + BatchWrite requests are not replay protected, meaning + that each mutation group may be applied more than once. 
+ Replays of non-idempotent mutations may have undesirable + effects. For example, replays of an insert mutation may + produce an already exists error or if you use generated + or commit timestamp-based keys, it may result in + additional rows being added to the mutation's table. We + recommend structuring your mutation groups to be + idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + ~.BatchWriteResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_write" not in self._stubs: + self._stubs["batch_write"] = self.grpc_channel.unary_stream( + "/google.spanner.v1.Spanner/BatchWrite", + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs["batch_write"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 62a975c319..d0755e3a67 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -771,6 +771,50 @@ def partition_read( ) return self._stubs["partition_read"] + @property + def batch_write( + self, + ) -> Callable[[spanner.BatchWriteRequest], Awaitable[spanner.BatchWriteResponse]]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection + of efficient transactions. All mutations in a group are + committed atomically. However, mutations across groups + can be committed non-atomically in an unspecified order + and thus, they must be independent of each other. + Partial failure is possible, i.e., some groups may have + been committed successfully, while some may have failed. + The results of individual batches are streamed into the + response as the batches are applied. + + BatchWrite requests are not replay protected, meaning + that each mutation group may be applied more than once. + Replays of non-idempotent mutations may have undesirable + effects. For example, replays of an insert mutation may + produce an already exists error or if you use generated + or commit timestamp-based keys, it may result in + additional rows being added to the mutation's table. We + recommend structuring your mutation groups to be + idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + Awaitable[~.BatchWriteResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_write" not in self._stubs: + self._stubs["batch_write"] = self.grpc_channel.unary_stream( + "/google.spanner.v1.Spanner/BatchWrite", + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs["batch_write"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest.py b/google/cloud/spanner_v1/services/spanner/transports/rest.py index d7157886a5..5e32bfaf2a 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ b/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -78,6 +78,14 @@ def post_batch_create_sessions(self, response): logging.log(f"Received response: {response}") return response + def pre_batch_write(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_write(self, response): + logging.log(f"Received response: {response}") + return response + def pre_begin_transaction(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -211,6 +219,27 @@ def post_batch_create_sessions( """ return response + def pre_batch_write( + self, request: spanner.BatchWriteRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[spanner.BatchWriteRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_write + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_batch_write( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for batch_write + + Override in a subclass to manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. + """ + return response + def pre_begin_transaction( self, request: spanner.BeginTransactionRequest, @@ -681,6 +710,101 @@ def __call__( resp = self._interceptor.post_batch_create_sessions(resp) return resp + class _BatchWrite(SpannerRestStub): + def __hash__(self): + return hash("BatchWrite") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: spanner.BatchWriteRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the batch write method over HTTP. + + Args: + request (~.spanner.BatchWriteRequest): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.spanner.BatchWriteResponse: + The result of applying a batch of + mutations. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_batch_write(request, metadata) + pb_request = spanner.BatchWriteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, spanner.BatchWriteResponse) + resp = self._interceptor.post_batch_write(resp) + return resp + class _BeginTransaction(SpannerRestStub): def __hash__(self): return hash("BeginTransaction") @@ -2056,6 +2180,14 @@ def batch_create_sessions( # In C++ this would require a dynamic_cast return self._BatchCreateSessions(self._session, self._host, self._interceptor) # type: ignore + @property + def batch_write( + self, + ) -> Callable[[spanner.BatchWriteRequest], spanner.BatchWriteResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore + @property def begin_transaction( self, diff --git a/google/cloud/spanner_v1/types/__init__.py b/google/cloud/spanner_v1/types/__init__.py index df0960d9d9..f4f619f6c4 100644 --- a/google/cloud/spanner_v1/types/__init__.py +++ b/google/cloud/spanner_v1/types/__init__.py @@ -36,6 +36,8 @@ from .spanner import ( BatchCreateSessionsRequest, BatchCreateSessionsResponse, + BatchWriteRequest, + BatchWriteResponse, BeginTransactionRequest, CommitRequest, CreateSessionRequest, @@ -81,6 +83,8 @@ "ResultSetStats", "BatchCreateSessionsRequest", "BatchCreateSessionsResponse", + "BatchWriteRequest", + "BatchWriteResponse", "BeginTransactionRequest", "CommitRequest", "CreateSessionRequest", diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py index 310cf8e31f..dfd83ac165 100644 --- a/google/cloud/spanner_v1/types/spanner.py +++ b/google/cloud/spanner_v1/types/spanner.py @@ -53,6 +53,8 @@ "BeginTransactionRequest", "CommitRequest", "RollbackRequest", + "BatchWriteRequest", + "BatchWriteResponse", }, ) @@ -1329,4 +1331,83 @@ class RollbackRequest(proto.Message): ) +class BatchWriteRequest(proto.Message): + r"""The request for [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + + Attributes: + session (str): + Required. 
The session in which the batch
+            request is to be run.
+        request_options (google.cloud.spanner_v1.types.RequestOptions):
+            Common options for this request.
+        mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]):
+            Required. The groups of mutations to be
+            applied.
+    """
+
+    class MutationGroup(proto.Message):
+        r"""A group of mutations to be committed together. Related
+        mutations should be placed in a group. For example, two
+        mutations inserting rows with the same primary key prefix in
+        both parent and child tables are related.
+
+        Attributes:
+            mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]):
+                Required. The mutations in this group.
+        """
+
+        mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message=mutation.Mutation,
+        )
+
+    session: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_options: "RequestOptions" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="RequestOptions",
+    )
+    mutation_groups: MutableSequence[MutationGroup] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message=MutationGroup,
+    )
+
+
+class BatchWriteResponse(proto.Message):
+    r"""The result of applying a batch of mutations.
+
+    Attributes:
+        indexes (MutableSequence[int]):
+            The mutation groups applied in this batch. The values index
+            into the ``mutation_groups`` field in the corresponding
+            ``BatchWriteRequest``.
+        status (google.rpc.status_pb2.Status):
+            An ``OK`` status indicates success. Any other status
+            indicates a failure.
+        commit_timestamp (google.protobuf.timestamp_pb2.Timestamp):
+            The commit timestamp of the transaction that applied this
+            batch. Present if ``status`` is ``OK``, absent otherwise.
+    """
+
+    indexes: MutableSequence[int] = proto.RepeatedField(
+        proto.INT32,
+        number=1,
+    )
+    status: status_pb2.Status = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=status_pb2.Status,
+    )
+    commit_timestamp: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
 __all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
index a8e8be3ae3..4384d19e2a 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
@@ -180,6 +180,175 @@
       ],
       "title": "spanner_v1_generated_spanner_batch_create_sessions_sync.py"
     },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.spanner_v1.SpannerAsyncClient",
+          "shortName": "SpannerAsyncClient"
+        },
+        "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_write",
+        "method": {
+          "fullName": "google.spanner.v1.Spanner.BatchWrite",
+          "service": {
+            "fullName": "google.spanner.v1.Spanner",
+            "shortName": "Spanner"
+          },
+          "shortName": "BatchWrite"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.spanner_v1.types.BatchWriteRequest"
+          },
+          {
+            "name": "session",
+            "type": "str"
+          },
+          {
+            "name": "mutation_groups",
+            "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]",
+        "shortName": "batch_write"
"batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.batch_write", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchWrite", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchWriteRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "mutation_groups", + "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py new file mode 100644 index 0000000000..39352562b1 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchWrite +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_BatchWrite_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = await client.batch_write(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_BatchWrite_async] diff --git a/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py new file mode 100644 index 0000000000..4ee88b0cd6 --- /dev/null +++ b/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchWrite +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_BatchWrite_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = client.batch_write(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_BatchWrite_sync] diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index df4d3501f2..b1ba4084df 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -40,6 +40,7 @@ class spannerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'batch_create_sessions': ('database', 'session_count', 'session_template', ), + 'batch_write': ('session', 'mutation_groups', 'request_options', ), 'begin_transaction': ('session', 'options', 'request_options', ), 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'request_options', ), 'create_session': ('database', 'session', ), diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 8bf8407724..7f593f1953 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -3857,6 +3857,292 @@ async def test_partition_read_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + spanner.BatchWriteRequest, + dict, + ], +) +def test_batch_write(request_type, transport: str = "grpc"): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([spanner.BatchWriteResponse()]) + response = client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchWriteRequest() + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, spanner.BatchWriteResponse) + + +def test_batch_write_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
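+    # Even with no arguments, the client should construct and send a
+    # default BatchWriteRequest to the stub.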
+ with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + client.batch_write() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchWriteRequest() + + +@pytest.mark.asyncio +async def test_batch_write_async( + transport: str = "grpc_asyncio", request_type=spanner.BatchWriteRequest +): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[spanner.BatchWriteResponse()] + ) + response = await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchWriteRequest() + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, spanner.BatchWriteResponse) + + +@pytest.mark.asyncio +async def test_batch_write_async_from_dict(): + await test_batch_write_async(request_type=dict) + + +def test_batch_write_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchWriteRequest() + + request.session = "session_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + call.return_value = iter([spanner.BatchWriteResponse()]) + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "session=session_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_write_field_headers_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchWriteRequest() + + request.session = "session_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[spanner.BatchWriteResponse()] + ) + await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
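+    # The session resource name travels in the x-goog-request-params
+    # metadata entry, which the backend uses to route the request.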
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "session=session_value", + ) in kw["metadata"] + + +def test_batch_write_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([spanner.BatchWriteResponse()]) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_write( + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = "session_value" + assert arg == mock_val + arg = args[0].mutation_groups + mock_val = [ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ] + assert arg == mock_val + + +def test_batch_write_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_write( + spanner.BatchWriteRequest(), + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + + +@pytest.mark.asyncio +async def test_batch_write_flattened_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_write), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([spanner.BatchWriteResponse()]) + + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_write( + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = "session_value" + assert arg == mock_val + arg = args[0].mutation_groups + mock_val = [ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_write_flattened_error_async(): + client = SpannerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
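+    # The client cannot merge flattened arguments into an explicit request
+    # object, so it must raise ValueError rather than guess.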
+ with pytest.raises(ValueError): + await client.batch_write( + spanner.BatchWriteRequest(), + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + + @pytest.mark.parametrize( "request_type", [ @@ -7695,6 +7981,315 @@ def test_partition_read_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + spanner.BatchWriteRequest, + dict, + ], +) +def test_batch_write_rest(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "session": "projects/sample1/instances/sample2/databases/sample3/sessions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse( + indexes=[752], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.batch_write(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchWriteResponse) + assert response.indexes == [752] + + +def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = "session_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == "session_value" + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.batch_write(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_write_rest_unset_required_fields(): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_write._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "session", + "mutationGroups", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_write_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpannerRestInterceptor, "post_batch_write" + ) as post, mock.patch.object( + transports.SpannerRestInterceptor, "pre_batch_write" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = spanner.BatchWriteRequest.pb(spanner.BatchWriteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = spanner.BatchWriteResponse.to_json( + spanner.BatchWriteResponse() + ) + req.return_value._content = "[{}]".format(req.return_value._content) + + request = spanner.BatchWriteRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.BatchWriteResponse() + + client.batch_write( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_write_rest_bad_request( + transport: str = "rest", request_type=spanner.BatchWriteRequest +): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "session": 
"projects/sample1/instances/sample2/databases/sample3/sessions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_write(request) + + +def test_batch_write_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "session": "projects/sample1/instances/sample2/databases/sample3/sessions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.batch_write(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite" + % client.transport._host, + args[1], + ) + + +def test_batch_write_rest_flattened_error(transport: str = "rest"): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_write( + spanner.BatchWriteRequest(), + session="session_value", + mutation_groups=[ + spanner.BatchWriteRequest.MutationGroup( + mutations=[ + mutation.Mutation( + insert=mutation.Mutation.Write(table="table_value") + ) + ] + ) + ], + ) + + +def test_batch_write_rest_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.SpannerGrpcTransport( @@ -7849,6 +8444,7 @@ def test_spanner_base_transport(): "rollback", "partition_query", "partition_read", + "batch_write", ) for method in methods: with pytest.raises(NotImplementedError): @@ -8161,6 +8757,9 @@ def test_spanner_client_transport_session_collision(transport_name): session1 = client1.transport.partition_read._session session2 = client2.transport.partition_read._session assert session1 != session2 + session1 = client1.transport.batch_write._session + session2 = client2.transport.batch_write._session + assert session1 != session2 def test_spanner_grpc_transport_channel(): From e9c6e27cbb7e2ede697e095736d22556f373eb03 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:08:53 -0400 Subject: [PATCH 05/13] chore: [autoapprove] Update `black` and `isort` to latest versions (#1020) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 6 ++-- .pre-commit-config.yaml | 2 +- .../database_admin/transports/rest.py | 4 --- google/cloud/spanner_v1/database.py | 1 - google/cloud/spanner_v1/session.py | 1 - noxfile.py | 36 ++++++++++--------- tests/system/_sample_data.py | 1 - tests/system/conftest.py | 1 - tests/system/test_dbapi.py | 1 - tests/system/test_session_api.py | 10 ------ tests/unit/spanner_dbapi/test_cursor.py | 9 ----- tests/unit/spanner_dbapi/test_parse_utils.py | 3 +- tests/unit/spanner_dbapi/test_parser.py | 1 - tests/unit/spanner_dbapi/test_types.py | 1 - tests/unit/spanner_dbapi/test_utils.py | 1 - tests/unit/test_batch.py | 2 -- tests/unit/test_client.py | 1 - tests/unit/test_database.py | 5 --- tests/unit/test_instance.py | 2 -- tests/unit/test_keyset.py | 1 - tests/unit/test_pool.py | 3 -- tests/unit/test_session.py | 1 - tests/unit/test_snapshot.py | 4 --- tests/unit/test_spanner.py | 3 -- tests/unit/test_streamed.py | 4 +-- tests/unit/test_transaction.py | 3 -- 27 files changed, 27 insertions(+), 84 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a9bdb1b7ac..dd98abbdee 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 96d593c8c8..0332d3267e 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19409cbd37..6a8e169506 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py index 5aaedde91c..07fe33ae45 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -3183,7 +3183,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the cancel operation method over HTTP. Args: @@ -3258,7 +3257,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Call the delete operation method over HTTP. Args: @@ -3333,7 +3331,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: - r"""Call the get operation method over HTTP. Args: @@ -3412,7 +3409,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: - r"""Call the list operations method over HTTP. 
Args: diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py index 1d211f7d6d..eee34361b3 100644 --- a/google/cloud/spanner_v1/database.py +++ b/google/cloud/spanner_v1/database.py @@ -648,7 +648,6 @@ def execute_partitioned_dml( def execute_pdml(): with SessionCheckout(self._pool) as session: - txn = api.begin_transaction( session=session.name, options=txn_options, metadata=metadata ) diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py index 256e72511b..b25af53805 100644 --- a/google/cloud/spanner_v1/session.py +++ b/google/cloud/spanner_v1/session.py @@ -441,7 +441,6 @@ def _delay_until_retry(exc, deadline, attempts): delay = _get_retry_delay(cause, attempts) if delay is not None: - if now + delay > deadline: raise diff --git a/noxfile.py b/noxfile.py index 95fe0d2365..e1677c220b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,22 +17,24 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,25 +42,25 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [ +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [ "tracing", ] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -71,6 +73,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -210,7 +213,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
# See https://github.com/grpc/grpc/issues/32163 diff --git a/tests/system/_sample_data.py b/tests/system/_sample_data.py index a7f3b80a86..2398442aff 100644 --- a/tests/system/_sample_data.py +++ b/tests/system/_sample_data.py @@ -70,7 +70,6 @@ def _check_row_data(row_data, expected, recurse_into_lists=True): def _check_cell_data(found_cell, expected_cell, recurse_into_lists=True): - if isinstance(found_cell, datetime_helpers.DatetimeWithNanoseconds): _assert_timestamp(expected_cell, found_cell) diff --git a/tests/system/conftest.py b/tests/system/conftest.py index fdeab14c8f..b297d1f2ad 100644 --- a/tests/system/conftest.py +++ b/tests/system/conftest.py @@ -119,7 +119,6 @@ def instance_configs(spanner_client): configs = list(_helpers.retry_503(spanner_client.list_instance_configs)()) if not _helpers.USE_EMULATOR: - # Defend against back-end returning configs for regions we aren't # actually allowed to use. configs = [config for config in configs if "-us-" in config.name] diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py index cb5a11e89d..29617ad614 100644 --- a/tests/system/test_dbapi.py +++ b/tests/system/test_dbapi.py @@ -64,7 +64,6 @@ def clear_table(transaction): @pytest.fixture(scope="function") def dbapi_database(raw_database): - raw_database.run_in_transaction(clear_table) yield raw_database diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py index 7d58324b04..c4ea2ded40 100644 --- a/tests/system/test_session_api.py +++ b/tests/system/test_session_api.py @@ -306,7 +306,6 @@ def assert_span_attributes( def _make_attributes(db_instance, **kwargs): - attributes = { "db.type": "spanner", "db.url": "spanner.googleapis.com", @@ -1099,7 +1098,6 @@ def test_transaction_batch_update_w_parent_span( ) def unit_of_work(transaction): - status, row_counts = transaction.batch_update( [insert_statement, update_statement, delete_statement] ) @@ -1303,7 +1301,6 @@ def _row_data(max_index): def _set_up_table(database, row_count): - sd = _sample_data def _unit_of_work(transaction): @@ -1430,7 +1427,6 @@ def test_multiuse_snapshot_read_isolation_read_timestamp(sessions_database): with sessions_database.snapshot( read_timestamp=committed, multi_use=True ) as read_ts: - before = list(read_ts.read(sd.TABLE, sd.COLUMNS, sd.ALL)) sd._check_row_data(before, all_data_rows) @@ -1452,7 +1448,6 @@ def test_multiuse_snapshot_read_isolation_exact_staleness(sessions_database): delta = datetime.timedelta(microseconds=1000) with sessions_database.snapshot(exact_staleness=delta, multi_use=True) as exact: - before = list(exact.read(sd.TABLE, sd.COLUMNS, sd.ALL)) sd._check_row_data(before, all_data_rows) @@ -1945,7 +1940,6 @@ def test_multiuse_snapshot_execute_sql_isolation_strong(sessions_database): all_data_rows = list(_row_data(row_count)) with sessions_database.snapshot(multi_use=True) as strong: - before = list(strong.execute_sql(sd.SQL)) sd._check_row_data(before, all_data_rows) @@ -2005,7 +1999,6 @@ def test_invalid_type(sessions_database): def test_execute_sql_select_1(sessions_database): - sessions_database.snapshot(multi_use=True) # Hello, world query @@ -2175,7 +2168,6 @@ def test_execute_sql_w_bytes_bindings(sessions_database, database_dialect): def test_execute_sql_w_timestamp_bindings(sessions_database, database_dialect): - timestamp_1 = datetime_helpers.DatetimeWithNanoseconds( 1989, 1, 17, 17, 59, 12, nanosecond=345612789 ) @@ -2462,7 +2454,6 @@ def test_execute_sql_w_query_param_struct(sessions_database, not_postgres): def 
test_execute_sql_returning_transfinite_floats(sessions_database, not_postgres): - with sessions_database.snapshot(multi_use=True) as snapshot: # Query returning -inf, +inf, NaN as column values rows = list( @@ -2537,7 +2528,6 @@ def details(self): def _check_batch_status(status_code, expected=code_pb2.OK): if status_code != expected: - _status_code_to_grpc_status_code = { member.value[0]: member for member in grpc.StatusCode } diff --git a/tests/unit/spanner_dbapi/test_cursor.py b/tests/unit/spanner_dbapi/test_cursor.py index f744fc769f..46a093b109 100644 --- a/tests/unit/spanner_dbapi/test_cursor.py +++ b/tests/unit/spanner_dbapi/test_cursor.py @@ -20,7 +20,6 @@ class TestCursor(unittest.TestCase): - INSTANCE = "test-instance" DATABASE = "test-database" @@ -917,7 +916,6 @@ def test_fetchone_retry_aborted(self, mock_client): with mock.patch( "google.cloud.spanner_dbapi.connection.Connection.retry_transaction" ) as retry_mock: - cursor.fetchone() retry_mock.assert_called_with() @@ -948,7 +946,6 @@ def test_fetchone_retry_aborted_statements(self, mock_client): "google.cloud.spanner_dbapi.connection.Connection.run_statement", return_value=([row], ResultsChecksum()), ) as run_mock: - cursor.fetchone() run_mock.assert_called_with(statement, retried=True) @@ -982,7 +979,6 @@ def test_fetchone_retry_aborted_statements_checksums_mismatch(self, mock_client) "google.cloud.spanner_dbapi.connection.Connection.run_statement", return_value=([row2], ResultsChecksum()), ) as run_mock: - with self.assertRaises(RetryAborted): cursor.fetchone() @@ -1007,7 +1003,6 @@ def test_fetchall_retry_aborted(self, mock_client): with mock.patch( "google.cloud.spanner_dbapi.connection.Connection.retry_transaction" ) as retry_mock: - cursor.fetchall() retry_mock.assert_called_with() @@ -1071,7 +1066,6 @@ def test_fetchall_retry_aborted_statements_checksums_mismatch(self, mock_client) "google.cloud.spanner_dbapi.connection.Connection.run_statement", return_value=([row2], ResultsChecksum()), ) as run_mock: - with self.assertRaises(RetryAborted): cursor.fetchall() @@ -1096,7 +1090,6 @@ def test_fetchmany_retry_aborted(self, mock_client): with mock.patch( "google.cloud.spanner_dbapi.connection.Connection.retry_transaction" ) as retry_mock: - cursor.fetchmany() retry_mock.assert_called_with() @@ -1127,7 +1120,6 @@ def test_fetchmany_retry_aborted_statements(self, mock_client): "google.cloud.spanner_dbapi.connection.Connection.run_statement", return_value=([row], ResultsChecksum()), ) as run_mock: - cursor.fetchmany(len(row)) run_mock.assert_called_with(statement, retried=True) @@ -1161,7 +1153,6 @@ def test_fetchmany_retry_aborted_statements_checksums_mismatch(self, mock_client "google.cloud.spanner_dbapi.connection.Connection.run_statement", return_value=([row2], ResultsChecksum()), ) as run_mock: - with self.assertRaises(RetryAborted): cursor.fetchmany(len(row)) diff --git a/tests/unit/spanner_dbapi/test_parse_utils.py b/tests/unit/spanner_dbapi/test_parse_utils.py index ddd1d5572a..887f984c2c 100644 --- a/tests/unit/spanner_dbapi/test_parse_utils.py +++ b/tests/unit/spanner_dbapi/test_parse_utils.py @@ -20,7 +20,6 @@ class TestParseUtils(unittest.TestCase): - skip_condition = sys.version_info[0] < 3 skip_message = "Subtests are not supported in Python 2" @@ -112,7 +111,7 @@ def test_sql_pyformat_args_to_spanner(self): ("SELECT * from t WHERE id=10", {"f1": "app", "f2": "name"}), ), ] - for ((sql_in, params), sql_want) in cases: + for (sql_in, params), sql_want in cases: with self.subTest(sql=sql_in): got_sql, got_named_args 
= sql_pyformat_args_to_spanner(sql_in, params) want_sql, want_named_args = sql_want diff --git a/tests/unit/spanner_dbapi/test_parser.py b/tests/unit/spanner_dbapi/test_parser.py index dd99f6fa4b..25f51591c2 100644 --- a/tests/unit/spanner_dbapi/test_parser.py +++ b/tests/unit/spanner_dbapi/test_parser.py @@ -17,7 +17,6 @@ class TestParser(unittest.TestCase): - skip_condition = sys.version_info[0] < 3 skip_message = "Subtests are not supported in Python 2" diff --git a/tests/unit/spanner_dbapi/test_types.py b/tests/unit/spanner_dbapi/test_types.py index 8c9dbe6c2b..375dc31853 100644 --- a/tests/unit/spanner_dbapi/test_types.py +++ b/tests/unit/spanner_dbapi/test_types.py @@ -18,7 +18,6 @@ class TestTypes(unittest.TestCase): - TICKS = 1572822862.9782631 + timezone # Sun 03 Nov 2019 23:14:22 UTC def test__date_from_ticks(self): diff --git a/tests/unit/spanner_dbapi/test_utils.py b/tests/unit/spanner_dbapi/test_utils.py index 76c347d402..fadbca1a09 100644 --- a/tests/unit/spanner_dbapi/test_utils.py +++ b/tests/unit/spanner_dbapi/test_utils.py @@ -17,7 +17,6 @@ class TestUtils(unittest.TestCase): - skip_condition = sys.version_info[0] < 3 skip_message = "Subtests are not supported in Python 2" diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py index 0199d44033..856816628f 100644 --- a/tests/unit/test_batch.py +++ b/tests/unit/test_batch.py @@ -32,7 +32,6 @@ class _BaseTest(unittest.TestCase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID @@ -426,7 +425,6 @@ class _Database(object): class _FauxSpannerAPI: - _create_instance_conflict = False _instance_not_found = False _committed = None diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index ed79271a96..049ee1124f 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -29,7 +29,6 @@ class _CredentialsWithScopes( class TestClient(unittest.TestCase): - PROJECT = "PROJECT" PATH = "projects/%s" % (PROJECT,) CONFIGURATION_NAME = "config-name" diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py index 5a6abf8084..bd368eed11 100644 --- a/tests/unit/test_database.py +++ b/tests/unit/test_database.py @@ -49,7 +49,6 @@ class _CredentialsWithScopes( class _BaseTest(unittest.TestCase): - PROJECT_ID = "project-id" PARENT = "projects/" + PROJECT_ID INSTANCE_ID = "instance-id" @@ -148,14 +147,12 @@ def test_ctor_w_route_to_leader_disbled(self): self.assertFalse(database._route_to_leader_enabled) def test_ctor_w_ddl_statements_non_string(self): - with self.assertRaises(ValueError): self._make_one( self.DATABASE_ID, instance=object(), ddl_statements=[object()] ) def test_ctor_w_ddl_statements_w_create_database(self): - with self.assertRaises(ValueError): self._make_one( self.DATABASE_ID, @@ -365,7 +362,6 @@ def test_default_leader(self): self.assertEqual(database.default_leader, default_leader) def test_spanner_api_property_w_scopeless_creds(self): - client = _Client() client_info = client._client_info = mock.Mock() client_options = client._client_options = mock.Mock() @@ -2744,7 +2740,6 @@ def put(self, session): class _Session(object): - _rows = () _created = False _transaction = None diff --git a/tests/unit/test_instance.py b/tests/unit/test_instance.py index 0a7dbccb81..20064e7e88 100644 --- a/tests/unit/test_instance.py +++ b/tests/unit/test_instance.py @@ -17,7 +17,6 @@ class TestInstance(unittest.TestCase): - PROJECT = "project" PARENT = "projects/" + PROJECT INSTANCE_ID = "instance-id" @@ -1031,7 +1030,6 @@ 
def __eq__(self, other): class _FauxInstanceAdminAPI(object): - _create_instance_conflict = False _instance_not_found = False _rpc_error = False diff --git a/tests/unit/test_keyset.py b/tests/unit/test_keyset.py index a7bad4070d..8fc743e075 100644 --- a/tests/unit/test_keyset.py +++ b/tests/unit/test_keyset.py @@ -205,7 +205,6 @@ def test_ctor_w_ranges(self): self.assertEqual(keyset.ranges, [range_1, range_2]) def test_ctor_w_all_and_keys(self): - with self.assertRaises(ValueError): self._make_one(all_=True, keys=[["key1"], ["key2"]]) diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py index 58665634de..23ed3e7251 100644 --- a/tests/unit/test_pool.py +++ b/tests/unit/test_pool.py @@ -913,7 +913,6 @@ def _make_transaction(*args, **kw): @total_ordering class _Session(object): - _transaction = None def __init__(self, database, exists=True, transaction=None): @@ -1004,7 +1003,6 @@ def session(self, **kwargs): class _Queue(object): - _size = 1 def __init__(self, *items): @@ -1035,5 +1033,4 @@ def put_nowait(self, item, **kwargs): class _Pool(_Queue): - _database = None diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py index 3125e33f21..0bb02ebdc7 100644 --- a/tests/unit/test_session.py +++ b/tests/unit/test_session.py @@ -37,7 +37,6 @@ def time(self): class TestSession(OpenTelemetryBase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID diff --git a/tests/unit/test_snapshot.py b/tests/unit/test_snapshot.py index 5d2afb4fe6..0010877396 100644 --- a/tests/unit/test_snapshot.py +++ b/tests/unit/test_snapshot.py @@ -56,7 +56,6 @@ def _getTargetClass(self): def _makeDerived(self, session): class _Derived(self._getTargetClass()): - _transaction_id = None _multi_use = False @@ -514,7 +513,6 @@ def test_iteration_w_multiple_span_creation(self): class Test_SnapshotBase(OpenTelemetryBase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID @@ -533,7 +531,6 @@ def _make_one(self, session): def _makeDerived(self, session): class _Derived(self._getTargetClass()): - _transaction_id = None _multi_use = False @@ -1358,7 +1355,6 @@ def test_partition_query_ok_w_timeout_and_retry_params(self): class TestSnapshot(OpenTelemetryBase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID diff --git a/tests/unit/test_spanner.py b/tests/unit/test_spanner.py index e4cd1e84cd..8c04e1142d 100644 --- a/tests/unit/test_spanner.py +++ b/tests/unit/test_spanner.py @@ -88,7 +88,6 @@ class TestTransaction(OpenTelemetryBase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID @@ -344,7 +343,6 @@ def _read_helper( self.assertEqual(result_set.stats, stats_pb) def _read_helper_expected_request(self, partition=None, begin=True, count=0): - if begin is True: expected_transaction = TransactionSelector( begin=TransactionOptions(read_write=TransactionOptions.ReadWrite()) @@ -939,7 +937,6 @@ def __init__(self): class _Session(object): - _transaction = None def __init__(self, database=None, name=TestTransaction.SESSION_NAME): diff --git a/tests/unit/test_streamed.py b/tests/unit/test_streamed.py index 2714ddfb45..85dcb40026 100644 --- a/tests/unit/test_streamed.py +++ b/tests/unit/test_streamed.py @@ -973,7 +973,6 @@ def test___iter___w_existing_rows_read(self): class _MockCancellableIterator(object): - 
cancel_calls = 0 def __init__(self, *values): @@ -987,7 +986,6 @@ def __next__(self): # pragma: NO COVER Py3k class TestStreamedResultSet_JSON_acceptance_tests(unittest.TestCase): - _json_tests = None def _getTargetClass(self): @@ -1006,7 +1004,7 @@ def _load_json_test(self, test_name): filename = os.path.join(dirname, "streaming-read-acceptance-test.json") raw = _parse_streaming_read_acceptance_tests(filename) tests = self.__class__._json_tests = {} - for (name, partial_result_sets, results) in raw: + for name, partial_result_sets, results in raw: tests[name] = partial_result_sets, results return self.__class__._json_tests[test_name] diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py index 85359dac19..ffcffa115e 100644 --- a/tests/unit/test_transaction.py +++ b/tests/unit/test_transaction.py @@ -42,7 +42,6 @@ class TestTransaction(OpenTelemetryBase): - PROJECT_ID = "project-id" INSTANCE_ID = "instance-id" INSTANCE_NAME = "projects/" + PROJECT_ID + "/instances/" + INSTANCE_ID @@ -910,7 +909,6 @@ def __init__(self): class _Session(object): - _transaction = None def __init__(self, database=None, name=TestTransaction.SESSION_NAME): @@ -919,7 +917,6 @@ def __init__(self, database=None, name=TestTransaction.SESSION_NAME): class _FauxSpannerAPI(object): - _committed = None def __init__(self, **kwargs): From 4d490cf9de600b16a90a1420f8773b2ae927983d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 10:39:25 -0400 Subject: [PATCH 06/13] feat(spanner): add autoscaling config to the instance proto (#1022) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(spanner): add autoscaling config to the instance proto PiperOrigin-RevId: 573098210 Source-Link: https://github.com/googleapis/googleapis/commit/d6467dbbb985d1777b6ab931ce09b8b3b1a7be08 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9ea8b7345ef2d93a49b15a332a682a61714f073e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWVhOGI3MzQ1ZWYyZDkzYTQ5YjE1YTMzMmE2ODJhNjE3MTRmMDczZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../spanner_admin_instance_v1/__init__.py | 2 + .../types/__init__.py | 2 + .../types/spanner_instance_admin.py | 140 +++++++++++++++++- 3 files changed, 140 insertions(+), 4 deletions(-) diff --git a/google/cloud/spanner_admin_instance_v1/__init__.py b/google/cloud/spanner_admin_instance_v1/__init__.py index bf1893144c..e92a5768ad 100644 --- a/google/cloud/spanner_admin_instance_v1/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/__init__.py @@ -22,6 +22,7 @@ from .services.instance_admin import InstanceAdminAsyncClient from .types.common import OperationProgress +from .types.spanner_instance_admin import AutoscalingConfig from .types.spanner_instance_admin import CreateInstanceConfigMetadata from .types.spanner_instance_admin import CreateInstanceConfigRequest from .types.spanner_instance_admin import CreateInstanceMetadata @@ -46,6 +47,7 @@ __all__ = ( "InstanceAdminAsyncClient", + "AutoscalingConfig", "CreateInstanceConfigMetadata", "CreateInstanceConfigRequest", "CreateInstanceMetadata", diff --git a/google/cloud/spanner_admin_instance_v1/types/__init__.py b/google/cloud/spanner_admin_instance_v1/types/__init__.py index 3ee4fcb10a..b4eaac8066 100644 --- 
a/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ b/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -17,6 +17,7 @@ OperationProgress, ) from .spanner_instance_admin import ( + AutoscalingConfig, CreateInstanceConfigMetadata, CreateInstanceConfigRequest, CreateInstanceMetadata, @@ -42,6 +43,7 @@ __all__ = ( "OperationProgress", + "AutoscalingConfig", "CreateInstanceConfigMetadata", "CreateInstanceConfigRequest", "CreateInstanceMetadata", diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py index 394e799d05..b4c18b85f2 100644 --- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py @@ -30,6 +30,7 @@ manifest={ "ReplicaInfo", "InstanceConfig", + "AutoscalingConfig", "Instance", "ListInstanceConfigsRequest", "ListInstanceConfigsResponse", @@ -297,6 +298,116 @@ class State(proto.Enum): ) +class AutoscalingConfig(proto.Message): + r"""Autoscaling config for an instance. + + Attributes: + autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): + Required. Autoscaling limits for an instance. + autoscaling_targets (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingTargets): + Required. The autoscaling targets for an + instance. + """ + + class AutoscalingLimits(proto.Message): + r"""The autoscaling limits for the instance. Users can define the + minimum and maximum compute capacity allocated to the instance, and + the autoscaler will only scale within that range. Users can either + use nodes or processing units to specify the limits, but should use + the same unit to set both the min_limit and max_limit. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + min_nodes (int): + Minimum number of nodes allocated to the + instance. If set, this number should be greater + than or equal to 1. + + This field is a member of `oneof`_ ``min_limit``. + min_processing_units (int): + Minimum number of processing units allocated + to the instance. If set, this number should be + multiples of 1000. + + This field is a member of `oneof`_ ``min_limit``. + max_nodes (int): + Maximum number of nodes allocated to the instance. If set, + this number should be greater than or equal to min_nodes. + + This field is a member of `oneof`_ ``max_limit``. + max_processing_units (int): + Maximum number of processing units allocated to the + instance. If set, this number should be multiples of 1000 + and be greater than or equal to min_processing_units. + + This field is a member of `oneof`_ ``max_limit``. + """ + + min_nodes: int = proto.Field( + proto.INT32, + number=1, + oneof="min_limit", + ) + min_processing_units: int = proto.Field( + proto.INT32, + number=2, + oneof="min_limit", + ) + max_nodes: int = proto.Field( + proto.INT32, + number=3, + oneof="max_limit", + ) + max_processing_units: int = proto.Field( + proto.INT32, + number=4, + oneof="max_limit", + ) + + class AutoscalingTargets(proto.Message): + r"""The autoscaling targets for an instance. + + Attributes: + high_priority_cpu_utilization_percent (int): + Required. 
The target high priority cpu utilization + percentage that the autoscaler should be trying to achieve + for the instance. This number is on a scale from 0 (no + utilization) to 100 (full utilization). The valid range is + [10, 90] inclusive. + storage_utilization_percent (int): + Required. The target storage utilization percentage that the + autoscaler should be trying to achieve for the instance. + This number is on a scale from 0 (no utilization) to 100 + (full utilization). The valid range is [10, 100] inclusive. + """ + + high_priority_cpu_utilization_percent: int = proto.Field( + proto.INT32, + number=1, + ) + storage_utilization_percent: int = proto.Field( + proto.INT32, + number=2, + ) + + autoscaling_limits: AutoscalingLimits = proto.Field( + proto.MESSAGE, + number=1, + message=AutoscalingLimits, + ) + autoscaling_targets: AutoscalingTargets = proto.Field( + proto.MESSAGE, + number=2, + message=AutoscalingTargets, + ) + + class Instance(proto.Message): r"""An isolated set of Cloud Spanner resources on which databases can be hosted. @@ -325,8 +436,13 @@ class Instance(proto.Message): node_count (int): The number of nodes allocated to this instance. At most one of either node_count or processing_units should be present - in the message. This may be zero in API responses for - instances that are not yet in state ``READY``. + in the message. + + Users can set the node_count field to specify the target + number of nodes allocated to the instance. + + This may be zero in API responses for instances that are not + yet in state ``READY``. See `the documentation `__ @@ -334,12 +450,23 @@ class Instance(proto.Message): processing_units (int): The number of processing units allocated to this instance. At most one of processing_units or node_count should be - present in the message. This may be zero in API responses - for instances that are not yet in state ``READY``. + present in the message. + + Users can set the processing_units field to specify the + target number of processing units allocated to the instance. + + This may be zero in API responses for instances that are not + yet in state ``READY``. See `the documentation `__ for more information about nodes and processing units. + autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig): + Optional. The autoscaling configuration. Autoscaling is + enabled if this field is set. When autoscaling is enabled, + node_count and processing_units are treated as OUTPUT_ONLY + fields and reflect the current compute capacity allocated to + the instance. state (google.cloud.spanner_admin_instance_v1.types.Instance.State): Output only. The current instance state. 
For [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance],
@@ -424,6 +551,11 @@ class State(proto.Enum):
        proto.INT32,
        number=9,
    )
+    autoscaling_config: "AutoscalingConfig" = proto.Field(
+        proto.MESSAGE,
+        number=17,
+        message="AutoscalingConfig",
+    )
    state: State = proto.Field(
        proto.ENUM,
        number=6,
From b534a8aac116a824544d63a24e38f3d484e0d207 Mon Sep 17 00:00:00 2001
From: Astha Mohta <35952883+asthamohta@users.noreply.github.com>
Date: Wed, 25 Oct 2023 11:19:04 +0530
Subject: [PATCH 07/13] feat: return list of dictionaries for execute streaming sql (#1003)
* changes
* adding tests
* comment changes
---
 google/cloud/spanner_v1/streamed.py | 21 +++++++++++++++++++++
 tests/system/test_session_api.py | 13 +++++++++++++
 2 files changed, 34 insertions(+)
diff --git a/google/cloud/spanner_v1/streamed.py b/google/cloud/spanner_v1/streamed.py
index 80a452d558..ac8fc71ce6 100644
--- a/google/cloud/spanner_v1/streamed.py
+++ b/google/cloud/spanner_v1/streamed.py
@@ -190,6 +190,27 @@ def one_or_none(self):
         except StopIteration:
             return answer
+    def to_dict_list(self):
+        """Return the result of a query as a list of dictionaries.
+        In each dictionary the key is the column name and the value is the
+        value of that column in a given row.
+
+        :rtype:
+            :class:`list of dict`
+        :returns: result rows as a list of dictionaries
+        """
+        rows = []
+        for row in self:
+            rows.append(
+                {
+                    column: value
+                    for column, value in zip(
+                        [column.name for column in self._metadata.row_type.fields], row
+                    )
+                }
+            )
+        return rows
+
 class Unmergeable(ValueError):
     """Unable to merge two values.
diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py
index c4ea2ded40..4a2ce5f495 100644
--- a/tests/system/test_session_api.py
+++ b/tests/system/test_session_api.py
@@ -1913,6 +1913,19 @@ def test_execute_sql_w_manual_consume(sessions_database):
     assert streamed._pending_chunk is None
+def test_execute_sql_w_to_dict_list(sessions_database):
+    sd = _sample_data
+    row_count = 40
+    _set_up_table(sessions_database, row_count)
+
+    with sessions_database.snapshot() as snapshot:
+        rows = snapshot.execute_sql(sd.SQL).to_dict_list()
+        all_data_rows = list(_row_data(row_count))
+        row_data = [list(row.values()) for row in rows]
+        sd._check_row_data(row_data, all_data_rows)
+        assert all(set(row.keys()) == set(sd.COLUMNS) for row in rows)
+
+
 def _check_sql_results(
     database,
     sql,
From ea7f8d574a45365ed1cd9685a56f370159a09e4b Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 30 Oct 2023 07:11:13 -0400
Subject: [PATCH 08/13] chore: rename rst files to avoid conflict with service names (#1026)
Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99
Co-authored-by: Owl Bot
---
 .github/.OwlBot.lock.yaml | 4 ++--
 .kokoro/requirements.txt | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index dd98abbdee..7f291dbd5f 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,5 +13,5 @@
 # limitations under the License.
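Two usage notes on the feature patches above. First, a minimal sketch of constructing the new AutoscalingConfig from PATCH 06 and attaching it to an Instance; the field names and value constraints come from the proto definitions above, while the project, instance, and config paths are hypothetical placeholders:

    from google.cloud import spanner_admin_instance_v1

    # Each bound is a oneof: set nodes OR processing units, and use the
    # same unit for both min_limit and max_limit.
    limits = spanner_admin_instance_v1.AutoscalingConfig.AutoscalingLimits(
        min_nodes=1,  # must be >= 1 when set
        max_nodes=3,  # must be >= min_nodes
    )
    targets = spanner_admin_instance_v1.AutoscalingConfig.AutoscalingTargets(
        high_priority_cpu_utilization_percent=65,  # valid range [10, 90]
        storage_utilization_percent=95,  # valid range [10, 100]
    )
    instance = spanner_admin_instance_v1.Instance(
        name="projects/my-project/instances/my-instance",  # placeholder
        config="projects/my-project/instanceConfigs/regional-us-central1",  # placeholder
        display_name="Autoscaled instance",
        # With autoscaling_config set, node_count and processing_units are
        # treated as OUTPUT_ONLY and reflect the current allocated capacity.
        autoscaling_config=spanner_admin_instance_v1.AutoscalingConfig(
            autoscaling_limits=limits,
            autoscaling_targets=targets,
        ),
    )

Second, a sketch of the to_dict_list() helper from PATCH 07, mirroring the system test above; the instance, database, and table names are hypothetical:

    from google.cloud import spanner

    client = spanner.Client()
    database = client.instance("my-instance").database("my-database")

    with database.snapshot() as snapshot:
        results = snapshot.execute_sql("SELECT SingerId, FirstName FROM Singers")
        # Each row becomes {column_name: value}, keyed from the result set
        # metadata rather than by positional index.
        for row in results.to_dict_list():
            print(row["SingerId"], row["FirstName"])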
docker:
  image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547
-# created: 2023-10-09T14:06:13.397766266Z
+  digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99
+# created: 2023-10-18T20:26:37.410353675Z
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 0332d3267e..16170d0ca7 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -467,9 +467,9 @@ typing-extensions==4.4.0 \
    --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
    --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
    # via -r requirements.in
-urllib3==1.26.17 \
-    --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \
-    --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b
+urllib3==1.26.18 \
+    --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
+    --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
    # via
    #   requests
    #   twine
From 2d59dd09b8f14a37c780d8241a76e2f109ba88b0 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 30 Oct 2023 11:41:56 -0400
Subject: [PATCH 09/13] feat: add PG.OID type code annotation (#1023)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* chore: Update gapic-generator-python to v1.11.7
PiperOrigin-RevId: 573230664
Source-Link: https://github.com/googleapis/googleapis/commit/93beed334607e70709cc60e6145be65fdc8ec386
Source-Link: https://github.com/googleapis/googleapis-gen/commit/f4a4edaa8057639fcf6adf9179872280d1a8f651
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjRhNGVkYWE4MDU3NjM5ZmNmNmFkZjkxNzk4NzIyODBkMWE4ZjY1MSJ9
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* chore: Update gapic-generator-python to v1.11.8
PiperOrigin-RevId: 574178735
Source-Link: https://github.com/googleapis/googleapis/commit/7307199008ee2d57a4337066de29f9cd8c444bc6
Source-Link: https://github.com/googleapis/googleapis-gen/commit/ce3af21b7c559a87c2befc076be0e3aeda3a26f0
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2UzYWYyMWI3YzU1OWE4N2MyYmVmYzA3NmJlMGUzYWVkYTNhMjZmMCJ9
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* chore: Update gapic-generator-python to v1.11.9
PiperOrigin-RevId: 574520922
Source-Link: https://github.com/googleapis/googleapis/commit/5183984d611beb41e90f65f08609b9d926f779bd
Source-Link: https://github.com/googleapis/googleapis-gen/commit/a59af19d4ac6509faedf1cc39029141b6a5b8968
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTU5YWYxOWQ0YWM2NTA5ZmFlZGYxY2MzOTAyOTE0MWI2YTViODk2OCJ9
* 🦉 Updates from OwlBot post-processor
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md
* feat: add PG.OID type code annotation
PiperOrigin-RevId: 577053414
Source-Link: https://github.com/googleapis/googleapis/commit/727c286eca5aa03d3354d6406a67f6a294c15f1c
Source-Link: https://github.com/googleapis/googleapis-gen/commit/2015275a7dda2ad3d1609f06c4208125c7de8a9d
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjAxNTI3NWE3ZGRhMmFkM2QxNjA5ZjA2YzQyMDgxMjVjN2RlOGE5ZCJ9
* 🦉 Updates from OwlBot post-processor
See
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove obsolete rst files --------- Co-authored-by: Owl Bot Co-authored-by: Sri Harsha CH <57220027+harshachinta@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- docs/index.rst | 12 +- .../{services.rst => services_.rst} | 0 .../{types.rst => types_.rst} | 0 .../{services.rst => services_.rst} | 0 .../{types.rst => types_.rst} | 0 .../{services.rst => services_.rst} | 0 docs/spanner_v1/{types.rst => types_.rst} | 0 google/cloud/spanner_v1/types/type.py | 7 + .../test_database_admin.py | 472 +++++++++++------- .../test_instance_admin.py | 108 ++-- tests/unit/gapic/spanner_v1/test_spanner.py | 175 ++++--- 11 files changed, 465 insertions(+), 309 deletions(-) rename docs/spanner_admin_database_v1/{services.rst => services_.rst} (100%) rename docs/spanner_admin_database_v1/{types.rst => types_.rst} (100%) rename docs/spanner_admin_instance_v1/{services.rst => services_.rst} (100%) rename docs/spanner_admin_instance_v1/{types.rst => types_.rst} (100%) rename docs/spanner_v1/{services.rst => services_.rst} (100%) rename docs/spanner_v1/{types.rst => types_.rst} (100%) diff --git a/docs/index.rst b/docs/index.rst index 0e7f24d6e7..92686cc61c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -36,13 +36,13 @@ API Documentation spanner_v1/transaction spanner_v1/streamed - spanner_v1/services - spanner_v1/types - spanner_admin_database_v1/services - spanner_admin_database_v1/types + spanner_v1/services_ + spanner_v1/types_ + spanner_admin_database_v1/services_ + spanner_admin_database_v1/types_ spanner_admin_database_v1/database_admin - spanner_admin_instance_v1/services - spanner_admin_instance_v1/types + spanner_admin_instance_v1/services_ + spanner_admin_instance_v1/types_ spanner_admin_instance_v1/instance_admin diff --git a/docs/spanner_admin_database_v1/services.rst b/docs/spanner_admin_database_v1/services_.rst similarity index 100% rename from docs/spanner_admin_database_v1/services.rst rename to docs/spanner_admin_database_v1/services_.rst diff --git a/docs/spanner_admin_database_v1/types.rst b/docs/spanner_admin_database_v1/types_.rst similarity index 100% rename from docs/spanner_admin_database_v1/types.rst rename to docs/spanner_admin_database_v1/types_.rst diff --git a/docs/spanner_admin_instance_v1/services.rst b/docs/spanner_admin_instance_v1/services_.rst similarity index 100% rename from docs/spanner_admin_instance_v1/services.rst rename to docs/spanner_admin_instance_v1/services_.rst diff --git a/docs/spanner_admin_instance_v1/types.rst b/docs/spanner_admin_instance_v1/types_.rst similarity index 100% rename from docs/spanner_admin_instance_v1/types.rst rename to docs/spanner_admin_instance_v1/types_.rst diff --git a/docs/spanner_v1/services.rst b/docs/spanner_v1/services_.rst similarity index 100% rename from docs/spanner_v1/services.rst rename to docs/spanner_v1/services_.rst diff --git a/docs/spanner_v1/types.rst b/docs/spanner_v1/types_.rst similarity index 100% rename from docs/spanner_v1/types.rst rename to docs/spanner_v1/types_.rst diff --git a/google/cloud/spanner_v1/types/type.py b/google/cloud/spanner_v1/types/type.py index f3fa94b4a8..f25c465dd4 100644 --- a/google/cloud/spanner_v1/types/type.py +++ b/google/cloud/spanner_v1/types/type.py @@ -137,10 +137,17 @@ class TypeAnnotationCode(proto.Enum): PostgreSQL JSONB values. 
Currently this annotation is always needed for
[JSON][google.spanner.v1.TypeCode.JSON] when a client interacts
with PostgreSQL-enabled Spanner databases.
+        PG_OID (4):
+            PostgreSQL compatible OID type. This
+            annotation can be used by a client interacting
+            with a PostgreSQL-enabled Spanner database to
+            specify that a value should be treated using the
+            semantics of the OID type.
    """
    TYPE_ANNOTATION_CODE_UNSPECIFIED = 0
    PG_NUMERIC = 2
    PG_JSONB = 3
+    PG_OID = 4
class Type(proto.Message):
diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
index 48d5447d37..7a9e9c5d33 100644
--- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
+++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
@@ -6627,8 +6627,9 @@ def test_list_databases_rest(request_type):
     # Wrap the value into a proper Response obj
     response_value = Response()
     response_value.status_code = 200
-    pb_return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
-    json_return_value = json_format.MessageToJson(pb_return_value)
+    # Convert return value to protobuf type
+    return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+    json_return_value = json_format.MessageToJson(return_value)
     response_value._content = json_return_value.encode("UTF-8")
     req.return_value = response_value
@@ -6710,10 +6711,9 @@ def test_list_databases_rest_required_fields(
     response_value = Response()
     response_value.status_code = 200
-    pb_return_value = spanner_database_admin.ListDatabasesResponse.pb(
-        return_value
-    )
-    json_return_value = json_format.MessageToJson(pb_return_value)
+    # Convert return value to protobuf type
+    return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+    json_return_value = json_format.MessageToJson(return_value)
     response_value._content = json_return_value.encode("UTF-8")
     req.return_value = response_value
@@ -6848,8 +6848,9 @@ def test_list_databases_rest_flattened():
     # Wrap the value into a proper Response obj
     response_value = Response()
     response_value.status_code = 200
-    pb_return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
-    json_return_value = json_format.MessageToJson(pb_return_value)
+    # Convert return value to protobuf type
+    return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+    json_return_value = json_format.MessageToJson(return_value)
     response_value._content = json_return_value.encode("UTF-8")
     req.return_value = response_value
@@ -7253,8 +7254,9 @@ def test_get_database_rest(request_type):
     # Wrap the value into a proper Response obj
     response_value = Response()
     response_value.status_code = 200
-    pb_return_value = spanner_database_admin.Database.pb(return_value)
-    json_return_value = json_format.MessageToJson(pb_return_value)
+    # Convert return value to protobuf type
+    return_value = spanner_database_admin.Database.pb(return_value)
+    json_return_value = json_format.MessageToJson(return_value)
     response_value._content = json_return_value.encode("UTF-8")
     req.return_value = response_value
@@ -7335,8 +7337,9 @@ def test_get_database_rest_required_fields(
     response_value = Response()
     response_value.status_code = 200
-    pb_return_value = spanner_database_admin.Database.pb(return_value)
-    json_return_value = json_format.MessageToJson(pb_return_value)
+    # Convert return value to protobuf type
+    return_value = spanner_database_admin.Database.pb(return_value)
+    json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7463,8 +7466,9 @@ def test_get_database_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.Database.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7555,6 +7559,73 @@ def test_update_database_rest(request_type): "enable_drop_protection": True, "reconciling": True, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] request = request_type(**request_init) # Mock the http request call within the 
method and fake a response. @@ -7736,43 +7807,6 @@ def test_update_database_rest_bad_request( request_init = { "database": {"name": "projects/sample1/instances/sample2/databases/sample3"} } - request_init["database"] = { - "name": "projects/sample1/instances/sample2/databases/sample3", - "state": 1, - "create_time": {"seconds": 751, "nanos": 543}, - "restore_info": { - "source_type": 1, - "backup_info": { - "backup": "backup_value", - "version_time": {}, - "create_time": {}, - "source_database": "source_database_value", - }, - }, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "encryption_info": [ - { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "kms_key_version": "kms_key_version_value", - } - ], - "version_retention_period": "version_retention_period_value", - "earliest_version_time": {}, - "default_leader": "default_leader_value", - "database_dialect": 1, - "enable_drop_protection": True, - "reconciling": True, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8415,8 +8449,9 @@ def test_get_database_ddl_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8491,10 +8526,11 @@ def test_get_database_ddl_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.GetDatabaseDdlResponse.pb( + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8623,8 +8659,9 @@ def test_get_database_ddl_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8690,8 +8727,7 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8768,8 +8804,7 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - 
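Stepping back from the regenerated tests for a moment: the substantive change in PATCH 09 is the new PG_OID annotation on spanner_v1 types. A sketch of attaching it to a query parameter type, assuming OID values ride on an existing INT64 type code (the annotation modifies how a code is treated, per the docstring above); the SQL statement and the database handle are hypothetical:

    from google.cloud.spanner_v1 import Type, TypeCode, TypeAnnotationCode

    # PG_OID annotates an existing type code rather than introducing a new
    # one; INT64 as the base code is an assumption in this sketch.
    pg_oid_type = Type(
        code=TypeCode.INT64,
        type_annotation=TypeAnnotationCode.PG_OID,
    )

    # Hypothetical usage against a PostgreSQL-dialect database:
    # with database.snapshot() as snapshot:
    #     rows = snapshot.execute_sql(
    #         "SELECT relname FROM pg_class WHERE oid = $1",
    #         params={"p1": 1234},
    #         param_types={"p1": pg_oid_type},
    #     )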
pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8900,8 +8935,7 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8967,8 +9001,7 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9045,8 +9078,7 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9169,8 +9201,7 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9235,8 +9266,7 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9316,8 +9346,7 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9451,8 +9480,7 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -9539,6 +9567,73 @@ def test_create_backup_rest(request_type): ], "max_expire_time": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
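A note on the recurring "Convert return value to protobuf type" edits in these regenerated tests: json_format.MessageToJson() serializes raw protobuf messages, while the GAPIC types are proto-plus wrappers, so each test now unwraps the wrapper with the generated .pb() classmethod before serializing. A standalone sketch of the pattern (the next_page_token value is arbitrary):

    from google.protobuf import json_format

    from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

    # Build a proto-plus wrapper, unwrap it to the underlying protobuf
    # message, then serialize it to JSON for the mocked REST response.
    wrapped = spanner_database_admin.ListDatabasesResponse(next_page_token="token")
    raw_pb = spanner_database_admin.ListDatabasesResponse.pb(wrapped)
    print(json_format.MessageToJson(raw_pb))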
@@ -9747,39 +9842,6 @@ def test_create_backup_rest_bad_request( # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/instances/sample2"} - request_init["backup"] = { - "database": "database_value", - "version_time": {"seconds": 751, "nanos": 543}, - "expire_time": {}, - "name": "name_value", - "create_time": {}, - "size_bytes": 1089, - "state": 1, - "referencing_databases": [ - "referencing_databases_value1", - "referencing_databases_value2", - ], - "encryption_info": { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "kms_key_version": "kms_key_version_value", - }, - "database_dialect": 1, - "referencing_backups": [ - "referencing_backups_value1", - "referencing_backups_value2", - ], - "max_expire_time": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10175,8 +10237,9 @@ def test_get_backup_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10255,8 +10318,9 @@ def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10377,8 +10441,9 @@ def test_get_backup_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10465,6 +10530,73 @@ def test_update_backup_rest(request_type): ], "max_expire_time": {}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -10483,8 +10615,9 @@ def test_update_backup_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10563,8 +10696,9 @@ def test_update_backup_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -10660,39 +10794,6 @@ def test_update_backup_rest_bad_request( request_init = { "backup": {"name": "projects/sample1/instances/sample2/backups/sample3"} } - request_init["backup"] = { - "database": "database_value", - "version_time": {"seconds": 751, "nanos": 543}, - "expire_time": {}, - "name": "projects/sample1/instances/sample2/backups/sample3", - "create_time": {}, - "size_bytes": 1089, - "state": 1, - "referencing_databases": [ - "referencing_databases_value1", - "referencing_databases_value2", - ], - "encryption_info": { - "encryption_type": 1, - "encryption_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "kms_key_version": "kms_key_version_value", - }, - "database_dialect": 1, - "referencing_backups": [ - "referencing_backups_value1", - "referencing_backups_value2", - ], - "max_expire_time": {}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -10733,8 +10834,9 @@ def test_update_backup_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11048,8 +11150,9 @@ def test_list_backups_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11130,8 +11233,9 @@ def test_list_backups_rest_required_fields(request_type=backup.ListBackupsReques response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11263,8 +11367,9 @@ def test_list_backups_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11662,10 +11767,11 @@ def test_list_database_operations_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11748,10 +11854,11 @@ def test_list_database_operations_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -11888,10 +11995,11 @@ def test_list_database_operations_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12014,8 +12122,9 @@ def test_list_backup_operations_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12098,8 +12207,9 @@ def test_list_backup_operations_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12233,8 +12343,9 @@ def test_list_backup_operations_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = backup.ListBackupOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12356,10 +12467,9 @@ def test_list_database_roles_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.ListDatabaseRolesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12441,10 +12551,11 @@ def test_list_database_roles_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_database_admin.ListDatabaseRolesResponse.pb( + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -12582,10 +12693,9 @@ def test_list_database_roles_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
spanner_database_admin.ListDatabaseRolesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py index 7dbdb8a7f5..ac621afc00 100644 --- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -4838,10 +4838,11 @@ def test_list_instance_configs_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4923,10 +4924,11 @@ def test_list_instance_configs_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5062,10 +5064,11 @@ def test_list_instance_configs_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5196,8 +5199,9 @@ def test_get_instance_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5282,8 +5286,9 @@ def test_get_instance_config_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) 
response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5409,8 +5414,9 @@ def test_get_instance_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6299,10 +6305,11 @@ def test_list_instance_config_operations_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb( + return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6385,12 +6392,13 @@ def test_list_instance_config_operations_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = ( + # Convert return value to protobuf type + return_value = ( spanner_instance_admin.ListInstanceConfigOperationsResponse.pb( return_value ) ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6531,10 +6539,11 @@ def test_list_instance_config_operations_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb( + return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6659,8 +6668,9 @@ def test_list_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6743,10 +6753,9 @@ def test_list_instances_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstancesResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6882,8 +6891,9 @@ def test_list_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7009,8 +7019,9 @@ def test_get_instance_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7093,8 +7104,9 @@ def test_get_instance_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7219,8 +7231,9 @@ def test_get_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8082,8 +8095,7 @@ def test_set_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8160,8 +8172,7 @@ def test_set_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8290,8 +8301,7 @@ def test_set_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -8357,8 +8367,7 @@ def test_get_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8435,8 +8444,7 @@ def test_get_iam_policy_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8557,8 +8565,7 @@ def test_get_iam_policy_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8623,8 +8630,7 @@ def test_test_iam_permissions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8704,8 +8710,7 @@ def test_test_iam_permissions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8837,8 +8842,7 @@ def test_test_iam_permissions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = return_value - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py index 7f593f1953..d136ba902c 100644 --- a/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/tests/unit/gapic/spanner_v1/test_spanner.py @@ -4171,8 +4171,9 @@ def test_create_session_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4247,8 +4248,9 @@ def test_create_session_rest_required_fields(request_type=spanner.CreateSessionR response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4377,8 +4379,9 @@ def test_create_session_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4441,8 +4444,9 @@ def test_batch_create_sessions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchCreateSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4521,8 +4525,9 @@ def test_batch_create_sessions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchCreateSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4656,8 +4661,9 @@ def test_batch_create_sessions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchCreateSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4726,8 +4732,9 @@ def test_get_session_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4801,8 +4808,9 @@ def test_get_session_rest_required_fields(request_type=spanner.GetSessionRequest response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -4925,8 +4933,9 @@ def test_get_session_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4991,8 +5000,9 @@ def test_list_sessions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5073,8 +5083,9 @@ def test_list_sessions_rest_required_fields(request_type=spanner.ListSessionsReq response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5206,8 +5217,9 @@ def test_list_sessions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5582,8 +5594,9 @@ def test_execute_sql_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5660,8 +5673,9 @@ def test_execute_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5803,8 +5817,9 @@ def test_execute_streaming_sql_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - 
pb_return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -5892,8 +5907,9 @@ def test_execute_streaming_sql_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -6038,8 +6054,9 @@ def test_execute_batch_dml_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6118,8 +6135,9 @@ def test_execute_batch_dml_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6262,8 +6280,9 @@ def test_read_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6344,8 +6363,9 @@ def test_read_rest_required_fields(request_type=spanner.ReadRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6489,8 +6509,9 @@ def test_streaming_read_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = 
"[{}]".format(json_return_value) @@ -6580,8 +6601,9 @@ def test_streaming_read_rest_required_fields(request_type=spanner.ReadRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -6730,8 +6752,9 @@ def test_begin_transaction_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6807,8 +6830,9 @@ def test_begin_transaction_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6948,8 +6972,9 @@ def test_begin_transaction_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7019,8 +7044,9 @@ def test_commit_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7093,8 +7119,9 @@ def test_commit_rest_required_fields(request_type=spanner.CommitRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7222,8 +7249,9 @@ def test_commit_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7565,8 +7593,9 @@ def test_partition_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7645,8 +7674,9 @@ def test_partition_query_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7787,8 +7817,9 @@ def test_partition_read_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7865,8 +7896,9 @@ def test_partition_read_rest_required_fields(request_type=spanner.PartitionReadR response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8010,8 +8042,9 @@ def test_batch_write_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) @@ -8092,8 +8125,9 @@ def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = 
"[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") @@ -8239,8 +8273,9 @@ def test_batch_write_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) json_return_value = "[{}]".format(json_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value From 84d662b056ca4bd4177b3107ba463302b5362ff9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 10:10:38 -0400 Subject: [PATCH 10/13] feat(spanner): add directed_read_option in spanner.proto (#1030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(spanner): add directed_read_option in spanner.proto docs(spanner): updated comment formatting PiperOrigin-RevId: 578551679 Source-Link: https://github.com/googleapis/googleapis/commit/7c80b961d092ff59576df0eba672958b4954bc4b Source-Link: https://github.com/googleapis/googleapis-gen/commit/7b1172ba5e020eaef7de75062a576a11b8e117e4 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2IxMTcyYmE1ZTAyMGVhZWY3ZGU3NTA2MmE1NzZhMTFiOGUxMTdlNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/spanner/async_client.py | 45 +++-- .../spanner_v1/services/spanner/client.py | 45 +++-- google/cloud/spanner_v1/types/__init__.py | 2 + google/cloud/spanner_v1/types/spanner.py | 187 ++++++++++++++++-- scripts/fixup_spanner_v1_keywords.py | 8 +- 5 files changed, 227 insertions(+), 60 deletions(-) diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py index 7c2e950793..371500333e 100644 --- a/google/cloud/spanner_v1/services/spanner/async_client.py +++ b/google/cloud/spanner_v1/services/spanner/async_client.py @@ -422,7 +422,7 @@ async def sample_batch_create_sessions(): Returns: google.cloud.spanner_v1.types.BatchCreateSessionsResponse: The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. """ # Create or coerce a protobuf request object. @@ -1075,8 +1075,10 @@ async def sample_execute_batch_dml(): Returns: google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list - of [ResultSet][google.spanner.v1.ResultSet] messages, + The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of + [ResultSet][google.spanner.v1.ResultSet] messages, one for each DML statement that has successfully executed, in the same order as the statements in the request. If a statement fails, the status in the @@ -1086,34 +1088,35 @@ async def sample_execute_batch_dml(): following approach: 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. - 2. 
If the status was not OK, check the number of - result sets in the response. If the response - contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. + [google.rpc.Code][google.rpc.Code] enum value OK + indicates that all statements were executed + successfully. 2. If the status was not OK, check the + number of result sets in the response. If the + response contains N + [ResultSet][google.spanner.v1.ResultSet] messages, + then statement N+1 in the request failed. Example 1: - Request: 5 DML statements, all executed successfully. - - Response: 5 - [ResultSet][google.spanner.v1.ResultSet] messages, - with the status OK. + + \* Response: 5 + [ResultSet][google.spanner.v1.ResultSet] messages, + with the status OK. Example 2: - Request: 5 DML statements. The third statement has a syntax error. - - Response: 2 - [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (INVALID_ARGUMENT) status. The - number of [ResultSet][google.spanner.v1.ResultSet] - messages indicates that the third statement - failed, and the fourth and fifth statements were - not executed. + + \* Response: 2 + [ResultSet][google.spanner.v1.ResultSet] messages, + and a syntax error (INVALID_ARGUMENT) status. The + number of [ResultSet][google.spanner.v1.ResultSet] + messages indicates that the third statement failed, + and the fourth and fifth statements were not + executed. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py index 03907a1b0b..28f203fff7 100644 --- a/google/cloud/spanner_v1/services/spanner/client.py +++ b/google/cloud/spanner_v1/services/spanner/client.py @@ -669,7 +669,7 @@ def sample_batch_create_sessions(): Returns: google.cloud.spanner_v1.types.BatchCreateSessionsResponse: The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. """ # Create or coerce a protobuf request object. @@ -1279,8 +1279,10 @@ def sample_execute_batch_dml(): Returns: google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list - of [ResultSet][google.spanner.v1.ResultSet] messages, + The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of + [ResultSet][google.spanner.v1.ResultSet] messages, one for each DML statement that has successfully executed, in the same order as the statements in the request. If a statement fails, the status in the @@ -1290,34 +1292,35 @@ def sample_execute_batch_dml(): following approach: 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. - 2. If the status was not OK, check the number of - result sets in the response. If the response - contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. + [google.rpc.Code][google.rpc.Code] enum value OK + indicates that all statements were executed + successfully. 2. If the status was not OK, check the + number of result sets in the response. If the + response contains N + [ResultSet][google.spanner.v1.ResultSet] messages, + then statement N+1 in the request failed. Example 1: - Request: 5 DML statements, all executed successfully. 
- - Response: 5
- [ResultSet][google.spanner.v1.ResultSet] messages,
- with the status OK.
+
+ \* Response: 5
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ with the status OK.

 Example 2:

 - Request: 5 DML statements. The third
 statement has a syntax error.
- - Response: 2
- [ResultSet][google.spanner.v1.ResultSet] messages,
- and a syntax error (INVALID_ARGUMENT) status. The
- number of [ResultSet][google.spanner.v1.ResultSet]
- messages indicates that the third statement
- failed, and the fourth and fifth statements were
- not executed.
+
+ \* Response: 2
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ and a syntax error (INVALID_ARGUMENT) status. The
+ number of [ResultSet][google.spanner.v1.ResultSet]
+ messages indicates that the third statement failed,
+ and the fourth and fifth statements were not
+ executed.

        """
        # Create or coerce a protobuf request object.
diff --git a/google/cloud/spanner_v1/types/__init__.py b/google/cloud/spanner_v1/types/__init__.py
index f4f619f6c4..52b485d976 100644
--- a/google/cloud/spanner_v1/types/__init__.py
+++ b/google/cloud/spanner_v1/types/__init__.py
@@ -42,6 +42,7 @@
     CommitRequest,
     CreateSessionRequest,
     DeleteSessionRequest,
+    DirectedReadOptions,
     ExecuteBatchDmlRequest,
     ExecuteBatchDmlResponse,
     ExecuteSqlRequest,
@@ -89,6 +90,7 @@
     "CommitRequest",
     "CreateSessionRequest",
     "DeleteSessionRequest",
+    "DirectedReadOptions",
     "ExecuteBatchDmlRequest",
     "ExecuteBatchDmlResponse",
     "ExecuteSqlRequest",
diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py
index dfd83ac165..3dbacbe26b 100644
--- a/google/cloud/spanner_v1/types/spanner.py
+++ b/google/cloud/spanner_v1/types/spanner.py
@@ -41,6 +41,7 @@
     "ListSessionsResponse",
     "DeleteSessionRequest",
     "RequestOptions",
+    "DirectedReadOptions",
     "ExecuteSqlRequest",
     "ExecuteBatchDmlRequest",
     "ExecuteBatchDmlResponse",
@@ -381,6 +382,150 @@ class Priority(proto.Enum):
     )


+class DirectedReadOptions(proto.Message):
+    r"""The DirectedReadOptions can be used to indicate which replicas or
+    regions should be used for non-transactional reads or queries.
+
+    DirectedReadOptions may only be specified for a read-only
+    transaction; otherwise the API will return an ``INVALID_ARGUMENT``
+    error.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        include_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.IncludeReplicas):
+            Include_replicas indicates the order of replicas (as they
+            appear in this list) to process the request. If
+            auto_failover_disabled is set to true and all replicas are
+            exhausted without finding a healthy replica, Spanner will
+            wait for a replica in the list to become available; requests
+            may fail due to ``DEADLINE_EXCEEDED`` errors.
+
+            This field is a member of `oneof`_ ``replicas``.
+        exclude_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.ExcludeReplicas):
+            Exclude_replicas indicates replicas that should be excluded
+            from serving requests. Spanner will not route requests to
+            the replicas in this list.
+
+            This field is a member of `oneof`_ ``replicas``.
+    """
+
+    class ReplicaSelection(proto.Message):
+        r"""The directed read replica selector. 
Callers must provide one or more
+        of the following fields for replica selection:
+
+        -  ``location`` - The location must be one of the regions within the
+           multi-region configuration of your database.
+        -  ``type`` - The type of the replica.
+
+        Some examples of using replica_selectors are:
+
+        -  ``location:us-east1`` --> The "us-east1" replica(s) of any
+           available type will be used to process the request.
+        -  ``type:READ_ONLY`` --> The "READ_ONLY" type replica(s) in the
+           nearest available location will be used to process the request.
+        -  ``location:us-east1 type:READ_ONLY`` --> The "READ_ONLY" type
+           replica(s) in location "us-east1" will be used to process the
+           request.
+
+        Attributes:
+            location (str):
+                The location or region of the serving
+                requests, e.g. "us-east1".
+            type_ (google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection.Type):
+                The type of replica.
+        """
+
+        class Type(proto.Enum):
+            r"""Indicates the type of replica.
+
+            Values:
+                TYPE_UNSPECIFIED (0):
+                    Not specified.
+                READ_WRITE (1):
+                    Read-write replicas support both reads and
+                    writes.
+                READ_ONLY (2):
+                    Read-only replicas only support reads (not
+                    writes).
+            """
+            TYPE_UNSPECIFIED = 0
+            READ_WRITE = 1
+            READ_ONLY = 2
+
+        location: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        type_: "DirectedReadOptions.ReplicaSelection.Type" = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum="DirectedReadOptions.ReplicaSelection.Type",
+        )
+
+    class IncludeReplicas(proto.Message):
+        r"""An IncludeReplicas contains a repeated set of
+        ReplicaSelection which indicates the order in which replicas
+        should be considered.
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+            auto_failover_disabled (bool):
+                If true, Spanner will not route requests to a replica
+                outside the include_replicas list when all of the specified
+                replicas are unavailable or unhealthy. Default value is
+                ``false``.
+        """
+
+        replica_selections: MutableSequence[
+            "DirectedReadOptions.ReplicaSelection"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="DirectedReadOptions.ReplicaSelection",
+        )
+        auto_failover_disabled: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+
+    class ExcludeReplicas(proto.Message):
+        r"""An ExcludeReplicas contains a repeated set of
+        ReplicaSelection that should be excluded from serving requests.
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+        """
+
+        replica_selections: MutableSequence[
+            "DirectedReadOptions.ReplicaSelection"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="DirectedReadOptions.ReplicaSelection",
+        )
+
+    include_replicas: IncludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="replicas",
+        message=IncludeReplicas,
+    )
+    exclude_replicas: ExcludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="replicas",
+        message=ExcludeReplicas,
+    )
+
+
 class ExecuteSqlRequest(proto.Message):
     r"""The request for
     [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
@@ -481,14 +626,16 @@ class ExecuteSqlRequest(proto.Message):
             given query.
         request_options (google.cloud.spanner_v1.types.RequestOptions):
             Common options for this request.
+        directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions):
+            Directed read options for this request. 
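A minimal sketch of how the new message fits together once this change lands; this is a hedged example rather than part of the patch: the session path and SQL are placeholders, and the imports rely only on the ``google.cloud.spanner_v1.types`` exports added above.

    from google.cloud.spanner_v1.services.spanner import SpannerClient
    from google.cloud.spanner_v1.types import DirectedReadOptions, ExecuteSqlRequest

    client = SpannerClient()

    # Prefer READ_ONLY replicas in us-east1. Directed reads are only valid
    # for read-only transactions; a plain ExecuteSql call like this runs as
    # a single-use read-only transaction, which qualifies.
    directed_read = DirectedReadOptions(
        include_replicas=DirectedReadOptions.IncludeReplicas(
            replica_selections=[
                DirectedReadOptions.ReplicaSelection(
                    location="us-east1",
                    type_=DirectedReadOptions.ReplicaSelection.Type.READ_ONLY,
                )
            ],
            auto_failover_disabled=False,
        )
    )

    response = client.execute_sql(
        ExecuteSqlRequest(
            session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
            sql="SELECT 1",
            directed_read_options=directed_read,
        )
    )

Because ``replicas`` is a oneof, setting ``exclude_replicas`` instead would clear ``include_replicas``; only one of the two selectors can be active on a request.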
data_boost_enabled (bool): If this is for a partitioned query and this field is set to - ``true``, the request will be executed via Spanner + ``true``, the request is executed with Spanner Data Boost independent compute resources. If the field is set to ``true`` but the request does not set - ``partition_token``, the API will return an - ``INVALID_ARGUMENT`` error. + ``partition_token``, the API returns an ``INVALID_ARGUMENT`` + error. """ class QueryMode(proto.Enum): @@ -628,6 +775,11 @@ class QueryOptions(proto.Message): number=11, message="RequestOptions", ) + directed_read_options: "DirectedReadOptions" = proto.Field( + proto.MESSAGE, + number=15, + message="DirectedReadOptions", + ) data_boost_enabled: bool = proto.Field( proto.BOOL, number=16, @@ -870,14 +1022,14 @@ class PartitionQueryRequest(proto.Message): sql (str): Required. The query request to generate partitions for. The request will fail if the query is not root partitionable. - The query plan of a root partitionable query has a single - distributed union operator. A distributed union operator - conceptually divides one or more tables into multiple - splits, remotely evaluates a subquery independently on each - split, and then unions all results. - - This must not contain DML commands, such as INSERT, UPDATE, - or DELETE. Use + For a query to be root partitionable, it needs to satisfy a + few conditions. For example, the first operator in the query + execution plan must be a distributed union operator. For + more information about other conditions, see `Read data in + parallel `__. + + The query request must not contain DML commands, such as + INSERT, UPDATE, or DELETE. Use [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] with a PartitionedDml transaction for large, partition-friendly DML operations. @@ -1142,14 +1294,16 @@ class ReadRequest(proto.Message): create this partition_token. request_options (google.cloud.spanner_v1.types.RequestOptions): Common options for this request. + directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): + Directed read options for this request. data_boost_enabled (bool): If this is for a partitioned read and this field is set to - ``true``, the request will be executed via Spanner + ``true``, the request is executed with Spanner Data Boost independent compute resources. If the field is set to ``true`` but the request does not set - ``partition_token``, the API will return an - ``INVALID_ARGUMENT`` error. + ``partition_token``, the API returns an ``INVALID_ARGUMENT`` + error. 
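A companion sketch of the partitioned flow these fields describe: generate partitions for a root-partitionable query, then execute each partition with Data Boost. This illustrates typical batch usage under stated assumptions, not the patch itself; the session path, table, and query are placeholders.

    from google.cloud.spanner_v1.services.spanner import SpannerClient
    from google.cloud.spanner_v1.types import (
        ExecuteSqlRequest,
        PartitionQueryRequest,
        TransactionOptions,
        TransactionSelector,
    )

    client = SpannerClient()
    session = "projects/p/instances/i/databases/d/sessions/s"  # placeholder

    # Partitions must be created inside a read-only transaction.
    txn = client.begin_transaction(
        session=session,
        options=TransactionOptions(
            read_only=TransactionOptions.ReadOnly(strong=True)
        ),
    )

    # The query must be root partitionable and must not contain DML.
    sql = "SELECT SingerId, AlbumTitle FROM Albums"
    partitions = client.partition_query(
        PartitionQueryRequest(
            session=session,
            sql=sql,
            transaction=TransactionSelector(id=txn.id),
        )
    )

    for partition in partitions.partitions:
        # data_boost_enabled requires a partition_token; setting it without
        # one makes the API return INVALID_ARGUMENT, as documented above.
        result_set = client.execute_sql(
            ExecuteSqlRequest(
                session=session,
                sql=sql,
                transaction=TransactionSelector(id=txn.id),
                partition_token=partition.partition_token,
                data_boost_enabled=True,
            )
        )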
""" session: str = proto.Field( @@ -1195,6 +1349,11 @@ class ReadRequest(proto.Message): number=11, message="RequestOptions", ) + directed_read_options: "DirectedReadOptions" = proto.Field( + proto.MESSAGE, + number=14, + message="DirectedReadOptions", + ) data_boost_enabled: bool = proto.Field( proto.BOOL, number=15, diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py index b1ba4084df..f79f70b2dd 100644 --- a/scripts/fixup_spanner_v1_keywords.py +++ b/scripts/fixup_spanner_v1_keywords.py @@ -46,15 +46,15 @@ class spannerCallTransformer(cst.CSTTransformer): 'create_session': ('database', 'session', ), 'delete_session': ('name', ), 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', ), - 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'data_boost_enabled', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'data_boost_enabled', ), + 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', ), + 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', ), 'get_session': ('name', ), 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'data_boost_enabled', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', ), 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'data_boost_enabled', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: From 38d62b275d472b26c4ce5df029b3a2ab39cc712c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Nov 2023 07:31:12 -0400 Subject: [PATCH 11/13] chore: update docfx minimum Python version (#1031) Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7f291dbd5f..ec696b558c 100644 --- 
a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/noxfile.py b/noxfile.py index e1677c220b..b1274090f0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -344,7 +344,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From e5acb568c276063d45a6db58d0744e8e59a6afce Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 12:49:12 -0500 Subject: [PATCH 12/13] chore: bump urllib3 from 1.26.12 to 1.26.18 (#1033) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/requirements.txt | 532 ++++++++++++++++++++------------------ 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ec696b558c..453b540c1e 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 16170d0ca7..8957e21104 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via 
google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - 
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + 
--hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - 
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \
-    --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \
-    --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \
-    --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \
-    --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \
-    --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \
-    --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \
-    --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \
-    --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \
-    --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \
-    --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \
-    --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \
-    --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \
-    --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \
-    --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \
-    --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \
-    --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \
-    --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \
-    --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \
-    --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f
+cryptography==41.0.5 \
+    --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \
+    --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \
+    --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \
+    --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \
+    --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \
+    --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \
+    --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \
+    --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \
+    --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \
+    --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \
+    --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \
+    --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \
+    --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \
+    --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \
+    --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \
+    --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \
+    --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \
+    --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \
+    --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \
+    --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \
+    --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \
+    --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \
+    --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723
     # via
     #   gcp-releasetool
     #   secretstorage
-distlib==0.3.6 \
-    --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \
-    --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e
+distlib==0.3.7 \
+    --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \
+    --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8
     # via virtualenv
-docutils==0.19 \
-    --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
-    --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
+docutils==0.20.1 \
+    --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \
+    --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b
     # via readme-renderer
-filelock==3.8.0 \
-    --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \
-    --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4
+filelock==3.13.1 \
+    --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \
+    --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c
     # via virtualenv
-gcp-docuploader==0.6.4 \
-    --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
-    --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
+gcp-docuploader==0.6.5 \
+    --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
+    --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
     # via -r requirements.in
-gcp-releasetool==1.10.5 \
-    --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \
-    --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9
+gcp-releasetool==1.16.0 \
+    --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \
+    --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63
     # via -r requirements.in
-google-api-core==2.10.2 \
-    --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \
-    --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e
+google-api-core==2.12.0 \
+    --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \
+    --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160
     # via
     #   google-cloud-core
     #   google-cloud-storage
-google-auth==2.14.1 \
-    --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \
-    --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016
+google-auth==2.23.4 \
+    --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \
+    --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2
     # via
     #   gcp-releasetool
     #   google-api-core
     #   google-cloud-core
     #   google-cloud-storage
-google-cloud-core==2.3.2 \
-    --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \
-    --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a
+google-cloud-core==2.3.3 \
+    --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \
+    --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863
     # via google-cloud-storage
-google-cloud-storage==2.6.0 \
-    --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \
-    --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9
+google-cloud-storage==2.13.0 \
+    --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \
+    --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7
     # via gcp-docuploader
 google-crc32c==1.5.0 \
     --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
@@ -251,29 +231,31 @@ google-crc32c==1.5.0 \
     --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
     --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
     --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
-    # via google-resumable-media
-google-resumable-media==2.4.0 \
-    --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \
-    --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f
+    # via
+    #   google-cloud-storage
+    #   google-resumable-media
+google-resumable-media==2.6.0 \
+    --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \
+    --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b
     # via google-cloud-storage
-googleapis-common-protos==1.57.0 \
-    --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \
-    --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c
+googleapis-common-protos==1.61.0 \
+    --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \
+    --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b
     # via google-api-core
 idna==3.4 \
     --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
     --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
     # via requests
-importlib-metadata==5.0.0 \
-    --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \
-    --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43
+importlib-metadata==6.8.0 \
+    --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
+    --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
     # via
     #   -r requirements.in
     #   keyring
     #   twine
-jaraco-classes==3.2.3 \
-    --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \
-    --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a
+jaraco-classes==3.3.0 \
+    --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \
+    --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621
     # via keyring
 jeepney==0.8.0 \
     --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
@@ -285,75 +267,121 @@ jinja2==3.1.2 \
     --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
     --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
     # via gcp-releasetool
-keyring==23.11.0 \
-    --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \
-    --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361
+keyring==24.2.0 \
+    --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \
+    --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509
     # via
     #   gcp-releasetool
     #   twine
-markupsafe==2.1.1 \
-    --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
-    --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
-    --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
-    --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
-    --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
-    --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
-    --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
-    --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
-    --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
-    --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
-    --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
-    --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
-    --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
-    --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
-    --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
-    --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
-    --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
-    --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
-    --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
-    --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
-    --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
-    --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
-    --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
-    --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
-    --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
-    --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
-    --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
-    --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
-    --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
-    --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
-    --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
-    --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
-    --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
-    --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
-    --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
-    --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
-    --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
-    --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
-    --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
-    --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
+markdown-it-py==3.0.0 \
+    --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
+    --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
+    # via rich
+markupsafe==2.1.3 \
+    --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
+    --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
+    --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
+    --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
+    --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
+    --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
+    --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
+    --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
+    --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
+    --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
+    --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
+    --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
+    --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
+    --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
+    --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
+    --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
+    --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
+    --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
+    --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
+    --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
+    --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
+    --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
+    --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
+    --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
+    --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
+    --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
+    --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
+    --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
+    --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
+    --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
+    --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
+    --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
+    --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
+    --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
+    --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
+    --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
+    --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
+    --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
+    --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
+    --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
+    --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
+    --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
+    --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
+    --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
+    --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
+    --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
+    --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
+    --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
+    --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
+    --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
+    --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
+    --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
+    --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
+    --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
+    --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
+    --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
+    --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
+    --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
+    --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
+    --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
     # via jinja2
-more-itertools==9.0.0 \
-    --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \
-    --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab
+mdurl==0.1.2 \
+    --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
+    --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
+    # via markdown-it-py
+more-itertools==10.1.0 \
+    --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \
+    --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6
     # via jaraco-classes
-nox==2022.11.21 \
-    --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \
-    --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684
+nh3==0.2.14 \
+    --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \
+    --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \
+    --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \
+    --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \
+    --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \
+    --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \
+    --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \
+    --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \
+    --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \
+    --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \
+    --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \
+    --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \
+    --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \
+    --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \
+    --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \
+    --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75
+    # via readme-renderer
+nox==2023.4.22 \
+    --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \
+    --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f
     # via -r requirements.in
-packaging==21.3 \
-    --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
-    --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
+packaging==23.2 \
+    --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
+    --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
     # via
     #   gcp-releasetool
     #   nox
-pkginfo==1.8.3 \
-    --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
-    --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
+pkginfo==1.9.6 \
+    --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \
+    --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046
     # via twine
-platformdirs==2.5.4 \
-    --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \
-    --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10
+platformdirs==3.11.0 \
+    --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \
+    --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e
     # via virtualenv
 protobuf==3.20.3 \
     --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \
@@ -383,34 +411,30 @@ protobuf==3.20.3 \
     # via
     #   gcp-releasetool
     #   google-api-core
    #   googleapis-common-protos
-pyasn1==0.4.8 \
-    --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
-    --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
+pyasn1==0.5.0 \
+    --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
+    --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
     # via
     #   pyasn1-modules
     #   rsa
-pyasn1-modules==0.2.8 \
-    --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
-    --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
+pyasn1-modules==0.3.0 \
+    --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
+    --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
     # via google-auth
 pycparser==2.21 \
     --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
     --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
     # via cffi
-pygments==2.15.0 \
-    --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \
-    --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500
+pygments==2.16.1 \
+    --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \
+    --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29
     # via
     #   readme-renderer
     #   rich
-pyjwt==2.6.0 \
-    --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
-    --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
+pyjwt==2.8.0 \
+    --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
+    --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
     # via gcp-releasetool
-pyparsing==3.0.9 \
-    --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
-    --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
-    # via packaging
 pyperclip==1.8.2 \
    --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57
     # via gcp-releasetool
@@ -418,9 +442,9 @@ python-dateutil==2.8.2 \
     --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
     --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
     # via gcp-releasetool
-readme-renderer==37.3 \
-    --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \
-    --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343
+readme-renderer==42.0 \
+    --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \
+    --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1
     # via twine
 requests==2.31.0 \
     --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
     # via
     #   gcp-releasetool
     #   google-cloud-storage
     #   requests-toolbelt
     #   twine
-requests-toolbelt==0.10.1 \
-    --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \
-    --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d
+requests-toolbelt==1.0.0 \
+    --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \
+    --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
     # via twine
 rfc3986==2.0.0 \
     --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
     --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
     # via twine
-rich==12.6.0 \
-    --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \
-    --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0
+rich==13.6.0 \
+    --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \
+    --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef
     # via twine
 rsa==4.9 \
     --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
@@ -455,43 +479,37 @@ six==1.16.0 \
     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
     --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
     # via
-    #   bleach
     #   gcp-docuploader
-    #   google-auth
     #   python-dateutil
-twine==4.0.1 \
-    --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \
-    --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0
+twine==4.0.2 \
+    --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \
+    --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8
     # via -r requirements.in
-typing-extensions==4.4.0 \
-    --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
-    --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
+typing-extensions==4.8.0 \
+    --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \
+    --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef
     # via -r requirements.in
-urllib3==1.26.18 \
-    --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
-    --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
+urllib3==2.0.7 \
+    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
     # via
     #   requests
     #   twine
-virtualenv==20.16.7 \
-    --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \
-    --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29
+virtualenv==20.24.6 \
+    --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \
+    --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381
     # via nox
-webencodings==0.5.1 \
-    --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
-    --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
-    # via bleach
-wheel==0.38.4 \
-    --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \
-    --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8
+wheel==0.41.3 \
+    --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \
+    --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841
     # via -r requirements.in
-zipp==3.10.0 \
-    --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \
-    --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8
+zipp==3.17.0 \
+    --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
+    --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0
     # via importlib-metadata

 # The following packages are considered to be unsafe in a requirements file:
-setuptools==65.5.1 \
-    --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
-    --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
+setuptools==68.2.2 \
+    --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \
+    --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a
     # via -r requirements.in

From 07fbc45156a1b42a5e61c9c4b09923f239729aa8 Mon Sep 17 00:00:00 2001
From: Ankit Agarwal <146331865+ankiaga@users.noreply.github.com>
Date: Fri, 17 Nov 2023 13:48:17 +0530
Subject: [PATCH 13/13] fix: Executing existing DDL statements on executemany statement execution (#1032)

* Executing existing DDL statements on executemany statement execution

* Fixing test

* Added more tests and resolved comments

* Fixing test

* Resolved comments
---
 google/cloud/spanner_dbapi/cursor.py |   4 +
 tests/system/test_dbapi.py           | 151 ++++++++++++++++++++++++++-
 2 files changed, 154 insertions(+), 1 deletion(-)

diff --git a/google/cloud/spanner_dbapi/cursor.py b/google/cloud/spanner_dbapi/cursor.py
index 91bccedd4c..330aeb2c72 100644
--- a/google/cloud/spanner_dbapi/cursor.py
+++ b/google/cloud/spanner_dbapi/cursor.py
@@ -315,6 +315,10 @@ def executemany(self, operation, seq_of_params):
                 "Executing DDL statements with executemany() method is not allowed."
             )

+        # For every operation, we've got to ensure that any prior DDL
+        # statements were run.
+        self.connection.run_prior_DDL_statements()
+
         many_result_set = StreamedManyResultSets()

         if class_ in (parse_utils.STMT_INSERT, parse_utils.STMT_UPDATING):
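In practice, the four added lines above mean that executemany() now flushes any DDL statements buffered on the connection before running its batch, matching what execute() already did. A minimal sketch of the pattern this unblocks, assuming a Spanner instance "my-instance" and database "my-database" already exist (both identifiers are hypothetical, not from the patch):

    from google.cloud.spanner_dbapi import connect

    # Hypothetical instance/database IDs; any existing pair would do.
    conn = connect("my-instance", "my-database")
    cur = conn.cursor()

    # In non-autocommit mode this DDL is only buffered on the connection,
    # not executed immediately.
    cur.execute(
        """
        CREATE TABLE Singers (
            SingerId INT64 NOT NULL,
            Name STRING(1024),
        ) PRIMARY KEY (SingerId)
        """
    )

    # With this fix, executemany() first runs the buffered DDL, so the
    # batch insert below sees the newly created table.
    cur.executemany(
        "INSERT INTO Singers (SingerId, Name) VALUES (%s, %s)",
        [(1, "Marc"), (2, "Catalina")],
    )
    conn.commit()

    cur.close()
    conn.close()

Before this change, the executemany() call would fail because the buffered CREATE TABLE had not yet been applied to the database.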
diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py
index 29617ad614..f3c5da1f46 100644
--- a/tests/system/test_dbapi.py
+++ b/tests/system/test_dbapi.py
@@ -27,7 +27,6 @@ from google.cloud.spanner_v1 import gapic_version as package_version

 from . import _helpers

-
 DATABASE_NAME = "dbapi-txn"

 DDL_STATEMENTS = (
@@ -344,6 +343,156 @@ def test_DDL_autocommit(shared_instance, dbapi_database):
     op.result()


+def test_ddl_execute_autocommit_true(shared_instance, dbapi_database):
+    """Check that a DDL statement in autocommit mode executes successfully
+    via the execute method."""
+
+    conn = Connection(shared_instance, dbapi_database)
+    conn.autocommit = True
+    cur = conn.cursor()
+    cur.execute(
+        """
+        CREATE TABLE DdlExecuteAutocommit (
+            SingerId INT64 NOT NULL,
+            Name STRING(1024),
+        ) PRIMARY KEY (SingerId)
+        """
+    )
+    table = dbapi_database.table("DdlExecuteAutocommit")
+    assert table.exists() is True
+
+    cur.close()
+    conn.close()
+
+
+def test_ddl_executemany_autocommit_true(shared_instance, dbapi_database):
+    """Check that a DDL statement in autocommit mode raises an exception
+    from the executemany method."""
+
+    conn = Connection(shared_instance, dbapi_database)
+    conn.autocommit = True
+    cur = conn.cursor()
+    with pytest.raises(ProgrammingError):
+        cur.executemany(
+            """
+            CREATE TABLE DdlExecuteManyAutocommit (
+                SingerId INT64 NOT NULL,
+                Name STRING(1024),
+            ) PRIMARY KEY (SingerId)
+            """,
+            [],
+        )
+    table = dbapi_database.table("DdlExecuteManyAutocommit")
+    assert table.exists() is False
+
+    cur.close()
+    conn.close()
+
+
+def test_ddl_executemany_autocommit_false(shared_instance, dbapi_database):
+    """Check that a DDL statement in non-autocommit mode raises an exception
+    from the executemany method."""
+
+    conn = Connection(shared_instance, dbapi_database)
+    cur = conn.cursor()
+    with pytest.raises(ProgrammingError):
+        cur.executemany(
+            """
+            CREATE TABLE DdlExecuteManyAutocommit (
+                SingerId INT64 NOT NULL,
+                Name STRING(1024),
+            ) PRIMARY KEY (SingerId)
+            """,
+            [],
+        )
+    table = dbapi_database.table("DdlExecuteManyAutocommit")
+    assert table.exists() is False
+
+    cur.close()
+    conn.close()
+
+
+def test_ddl_execute(shared_instance, dbapi_database):
+    """Check that a DDL statement followed by a non-DDL execute statement in
+    non-autocommit mode results in successful DDL statement execution."""
+
+    conn = Connection(shared_instance, dbapi_database)
+    want_row = (
+        1,
+        "first-name",
+    )
+    cur = conn.cursor()
+    cur.execute(
+        """
+        CREATE TABLE DdlExecute (
+            SingerId INT64 NOT NULL,
+            Name STRING(1024),
+        ) PRIMARY KEY (SingerId)
+        """
+    )
+    table = dbapi_database.table("DdlExecute")
+    assert table.exists() is False
+
+    cur.execute(
+        """
+        INSERT INTO DdlExecute (SingerId, Name)
+        VALUES (1, "first-name")
+        """
+    )
+    assert table.exists() is True
+    conn.commit()
+
+    # read the resulting data from the database
+    cur.execute("SELECT * FROM DdlExecute")
+    got_rows = cur.fetchall()
+
+    assert got_rows == [want_row]
+
+    cur.close()
+    conn.close()
+
+
+def test_ddl_executemany(shared_instance, dbapi_database):
+    """Check that a DDL statement followed by a non-DDL executemany statement
+    in non-autocommit mode results in successful DDL statement execution."""
+
+    conn = Connection(shared_instance, dbapi_database)
+    want_row = (
+        1,
+        "first-name",
+    )
+    cur = conn.cursor()
+    cur.execute(
+        """
+        CREATE TABLE DdlExecuteMany (
+            SingerId INT64 NOT NULL,
+            Name STRING(1024),
+        ) PRIMARY KEY (SingerId)
+        """
+    )
+    table = dbapi_database.table("DdlExecuteMany")
+    assert table.exists() is False
+
+    cur.executemany(
+        """
+        INSERT INTO DdlExecuteMany (SingerId, Name)
+        VALUES (%s, %s)
+        """,
+        [want_row],
+    )
+    assert table.exists() is True
+    conn.commit()
+
+    # read the resulting data from the database
+    cur.execute("SELECT * FROM DdlExecuteMany")
+    got_rows = cur.fetchall()
+
+    assert got_rows == [want_row]
+
+    cur.close()
+    conn.close()
+
+
 @pytest.mark.skipif(_helpers.USE_EMULATOR, reason="Emulator does not support json.")
 def test_autocommit_with_json_data(shared_instance, dbapi_database):
     """