From 4ab01569ce6c799f78a7f667810b81ea4f28825f Mon Sep 17 00:00:00 2001 From: Owlbot Bootstrapper Date: Tue, 26 Nov 2024 12:26:30 +0000 Subject: [PATCH 1/6] feat: initial commit From 3a0589ad9cee6eefe9254e2eb21cf6dc0a0c9a29 Mon Sep 17 00:00:00 2001 From: Owlbot Bootstrapper Date: Tue, 26 Nov 2024 12:27:43 +0000 Subject: [PATCH 2/6] feat: initial generation of library Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLW1lbW9yeXN0b3JlLy5Pd2xCb3QueWFtbCIsImgiOiI5YTZiNTAzN2E5ZjYwZTcyYmJiYmRjNjYzMTQ3NjE2ZTAzY2RlOWMzIn0= --- packages/google-cloud-memorystore/.OwlBot.yaml | 18 ++++++++++++++++++ .../.repo-metadata.json | 17 +++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 packages/google-cloud-memorystore/.OwlBot.yaml create mode 100644 packages/google-cloud-memorystore/.repo-metadata.json diff --git a/packages/google-cloud-memorystore/.OwlBot.yaml b/packages/google-cloud-memorystore/.OwlBot.yaml new file mode 100644 index 000000000000..a8f64c93a59c --- /dev/null +++ b/packages/google-cloud-memorystore/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +deep-copy-regex: + - source: /google/cloud/memorystore/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-memorystore/$1 +api-name: google-cloud-memorystore diff --git a/packages/google-cloud-memorystore/.repo-metadata.json b/packages/google-cloud-memorystore/.repo-metadata.json new file mode 100644 index 000000000000..4739821ab93b --- /dev/null +++ b/packages/google-cloud-memorystore/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-memorystore", + "name_pretty": "", + "api_description": "", + "product_documentation": "", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-memorystore", + "api_id": "memorystore.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "memorystore" +} From 9cb7431156b6c8e6021308c387f095a6509e77e5 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 26 Nov 2024 12:38:50 +0000 Subject: [PATCH 3/6] Owl Bot copied code from https://github.com/googleapis/googleapis-gen/commit/9a6b5037a9f60e72bbbbdc663147616e03cde9c3 --- .../google-cloud-memorystore/v1/.coveragerc | 13 + .../google-cloud-memorystore/v1/.flake8 | 33 + .../google-cloud-memorystore/v1/MANIFEST.in | 2 + .../google-cloud-memorystore/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../google-cloud-memorystore/v1/docs/conf.py | 376 ++ .../v1/docs/index.rst | 7 + .../v1/docs/memorystore_v1/memorystore.rst | 10 + .../v1/docs/memorystore_v1/services_.rst | 6 + .../v1/docs/memorystore_v1/types_.rst | 6 + .../v1/google/cloud/memorystore/__init__.py | 61 + .../google/cloud/memorystore/gapic_version.py | 16 + .../v1/google/cloud/memorystore/py.typed | 
2 + .../google/cloud/memorystore_v1/__init__.py | 62 + .../cloud/memorystore_v1/gapic_metadata.json | 48 + .../cloud/memorystore_v1/gapic_version.py | 16 + .../v1/google/cloud/memorystore_v1/py.typed | 2 + .../cloud/memorystore_v1/services/__init__.py | 15 + .../services/memorystore/__init__.py | 20 + .../services/memorystore/client.py | 1636 ++++++++ .../services/memorystore/pagers.py | 93 + .../memorystore/transports/README.rst | 9 + .../memorystore/transports/__init__.py | 32 + .../services/memorystore/transports/base.py | 341 ++ .../services/memorystore/transports/rest.py | 1406 +++++++ .../memorystore/transports/rest_base.py | 485 +++ .../cloud/memorystore_v1/types/__init__.py | 56 + .../cloud/memorystore_v1/types/memorystore.py | 1225 ++++++ .../google-cloud-memorystore/v1/mypy.ini | 3 + .../google-cloud-memorystore/v1/noxfile.py | 280 ++ ...erated_memorystore_create_instance_sync.py | 63 + ...erated_memorystore_delete_instance_sync.py | 56 + ...orystore_get_certificate_authority_sync.py | 52 + ...generated_memorystore_get_instance_sync.py | 52 + ...nerated_memorystore_list_instances_sync.py | 53 + ...erated_memorystore_update_instance_sync.py | 61 + ..._metadata_google.cloud.memorystore.v1.json | 507 +++ .../scripts/fixup_memorystore_v1_keywords.py | 181 + .../google-cloud-memorystore/v1/setup.py | 98 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.13.txt | 6 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/memorystore_v1/__init__.py | 16 + .../gapic/memorystore_v1/test_memorystore.py | 3401 +++++++++++++++++ .../v1beta/.coveragerc | 13 + .../google-cloud-memorystore/v1beta/.flake8 | 33 + .../v1beta/MANIFEST.in | 2 + .../v1beta/README.rst | 49 + .../v1beta/docs/_static/custom.css | 3 + .../v1beta/docs/conf.py | 376 ++ .../v1beta/docs/index.rst | 7 + .../docs/memorystore_v1beta/memorystore.rst | 10 + .../docs/memorystore_v1beta/services_.rst | 6 + .../v1beta/docs/memorystore_v1beta/types_.rst | 6 + .../google/cloud/memorystore/__init__.py | 61 + .../google/cloud/memorystore/gapic_version.py | 16 + .../v1beta/google/cloud/memorystore/py.typed | 2 + .../cloud/memorystore_v1beta/__init__.py | 62 + .../memorystore_v1beta/gapic_metadata.json | 48 + .../cloud/memorystore_v1beta/gapic_version.py | 16 + .../google/cloud/memorystore_v1beta/py.typed | 2 + .../memorystore_v1beta/services/__init__.py | 15 + .../services/memorystore/__init__.py | 20 + .../services/memorystore/client.py | 1636 ++++++++ .../services/memorystore/pagers.py | 93 + .../memorystore/transports/README.rst | 9 + .../memorystore/transports/__init__.py | 32 + .../services/memorystore/transports/base.py | 341 ++ .../services/memorystore/transports/rest.py | 1406 +++++++ .../memorystore/transports/rest_base.py | 485 +++ .../memorystore_v1beta/types/__init__.py | 56 + .../memorystore_v1beta/types/memorystore.py | 1225 ++++++ .../google-cloud-memorystore/v1beta/mypy.ini | 3 + .../v1beta/noxfile.py | 280 ++ ...erated_memorystore_create_instance_sync.py | 63 + ...erated_memorystore_delete_instance_sync.py | 56 + ...orystore_get_certificate_authority_sync.py | 52 + ...generated_memorystore_get_instance_sync.py | 52 + ...nerated_memorystore_list_instances_sync.py | 53 + ...erated_memorystore_update_instance_sync.py | 61 + 
...adata_google.cloud.memorystore.v1beta.json | 507 +++ .../fixup_memorystore_v1beta_keywords.py | 181 + .../google-cloud-memorystore/v1beta/setup.py | 98 + .../v1beta/testing/constraints-3.10.txt | 6 + .../v1beta/testing/constraints-3.11.txt | 6 + .../v1beta/testing/constraints-3.12.txt | 6 + .../v1beta/testing/constraints-3.13.txt | 6 + .../v1beta/testing/constraints-3.7.txt | 10 + .../v1beta/testing/constraints-3.8.txt | 6 + .../v1beta/testing/constraints-3.9.txt | 6 + .../v1beta/tests/__init__.py | 16 + .../v1beta/tests/unit/__init__.py | 16 + .../v1beta/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/memorystore_v1beta/__init__.py | 16 + .../memorystore_v1beta/test_memorystore.py | 3401 +++++++++++++++++ 102 files changed, 21894 insertions(+) create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/.flake8 create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py create mode 100644 
owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/README.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst create mode 100644 
owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py create mode 100644 
owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/setup.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py create mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.coveragerc b/owl-bot-staging/google-cloud-memorystore/v1/.coveragerc new file mode 100644 index 000000000000..90ec0ce4fe89 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/memorystore/__init__.py + google/cloud/memorystore/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.flake8 b/owl-bot-staging/google-cloud-memorystore/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in new file mode 100644 index 000000000000..cb2b6f08702d --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/memorystore *.py +recursive-include google/cloud/memorystore_v1 *.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/README.rst b/owl-bot-staging/google-cloud-memorystore/v1/README.rst new file mode 100644 index 000000000000..6f935a43af2b --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Memorystore API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Memorystore API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py b/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py new file mode 100644 index 000000000000..8d134830ab88 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-memorystore documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir.
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-memorystore" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). 
+# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-memorystore-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-memorystore.tex", + u"google-cloud-memorystore Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-memorystore", + u"Google Cloud Memorystore Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-memorystore", + u"google-cloud-memorystore Documentation", + author, + "google-cloud-memorystore", + "GAPIC library for Google Cloud Memorystore API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst new file mode 100644 index 000000000000..bd30847b6cc3 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1/services_ + memorystore_v1/types_ diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst new file mode 100644 index 000000000000..e60261baa130 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst @@ -0,0 +1,10 @@ +Memorystore +----------------------------- + +.. automodule:: google.cloud.memorystore_v1.services.memorystore + :members: + :inherited-members: + +.. automodule:: google.cloud.memorystore_v1.services.memorystore.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst new file mode 100644 index 000000000000..644aef45922a --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Memorystore v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + memorystore diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst new file mode 100644 index 000000000000..478ab20557b5 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Memorystore v1 API +========================================= + +.. 
automodule:: google.cloud.memorystore_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py new file mode 100644 index 000000000000..6af94ce66397 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient + +from google.cloud.memorystore_v1.types.memorystore import CertificateAuthority +from google.cloud.memorystore_v1.types.memorystore import CreateInstanceRequest +from google.cloud.memorystore_v1.types.memorystore import DeleteInstanceRequest +from google.cloud.memorystore_v1.types.memorystore import DiscoveryEndpoint +from google.cloud.memorystore_v1.types.memorystore import GetCertificateAuthorityRequest +from google.cloud.memorystore_v1.types.memorystore import GetInstanceRequest +from google.cloud.memorystore_v1.types.memorystore import Instance +from google.cloud.memorystore_v1.types.memorystore import ListInstancesRequest +from google.cloud.memorystore_v1.types.memorystore import ListInstancesResponse +from google.cloud.memorystore_v1.types.memorystore import NodeConfig +from google.cloud.memorystore_v1.types.memorystore import OperationMetadata +from google.cloud.memorystore_v1.types.memorystore import PersistenceConfig +from google.cloud.memorystore_v1.types.memorystore import PscAutoConnection +from google.cloud.memorystore_v1.types.memorystore import PscConnection +from google.cloud.memorystore_v1.types.memorystore import UpdateInstanceRequest +from google.cloud.memorystore_v1.types.memorystore import ZoneDistributionConfig +from google.cloud.memorystore_v1.types.memorystore import ConnectionType +from google.cloud.memorystore_v1.types.memorystore import PscConnectionStatus + +__all__ = ('MemorystoreClient', + 'CertificateAuthority', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'DiscoveryEndpoint', + 'GetCertificateAuthorityRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'NodeConfig', + 'OperationMetadata', + 'PersistenceConfig', + 'PscAutoConnection', + 'PscConnection', + 'UpdateInstanceRequest', + 'ZoneDistributionConfig', + 'ConnectionType', + 'PscConnectionStatus', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed new file mode 100644 index 000000000000..3e10cbb3572e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py new file mode 100644 index 000000000000..5ad10e820927 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
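The package __init__ modules in this part of the patch re-export the generated MemorystoreClient alongside the request and response types. As a quick orientation, a minimal sketch of calling the list RPC through the versioned package; the project and location values are placeholders, and credentials are assumed to come from Application Default Credentials:

from google.cloud import memorystore_v1

client = memorystore_v1.MemorystoreClient()  # synchronous client over the REST transport
request = memorystore_v1.ListInstancesRequest(
    parent="projects/my-project/locations/us-central1",  # placeholder parent
)
for instance in client.list_instances(request=request):  # pager iterates across pages
    print(instance.name)

The v1beta package further down in this patch mirrors the same surface.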
+# +from google.cloud.memorystore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient + +from .types.memorystore import CertificateAuthority +from .types.memorystore import CreateInstanceRequest +from .types.memorystore import DeleteInstanceRequest +from .types.memorystore import DiscoveryEndpoint +from .types.memorystore import GetCertificateAuthorityRequest +from .types.memorystore import GetInstanceRequest +from .types.memorystore import Instance +from .types.memorystore import ListInstancesRequest +from .types.memorystore import ListInstancesResponse +from .types.memorystore import NodeConfig +from .types.memorystore import OperationMetadata +from .types.memorystore import PersistenceConfig +from .types.memorystore import PscAutoConnection +from .types.memorystore import PscConnection +from .types.memorystore import UpdateInstanceRequest +from .types.memorystore import ZoneDistributionConfig +from .types.memorystore import ConnectionType +from .types.memorystore import PscConnectionStatus + +__all__ = ( +'CertificateAuthority', +'ConnectionType', +'CreateInstanceRequest', +'DeleteInstanceRequest', +'DiscoveryEndpoint', +'GetCertificateAuthorityRequest', +'GetInstanceRequest', +'Instance', +'ListInstancesRequest', +'ListInstancesResponse', +'MemorystoreClient', +'NodeConfig', +'OperationMetadata', +'PersistenceConfig', +'PscAutoConnection', +'PscConnection', +'PscConnectionStatus', +'UpdateInstanceRequest', +'ZoneDistributionConfig', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json new file mode 100644 index 000000000000..b33f1e105163 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json @@ -0,0 +1,48 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memorystore_v1", + "protoPackage": "google.cloud.memorystore.v1", + "schema": "1.0", + "services": { + "Memorystore": { + "clients": { + "rest": { + "libraryClient": "MemorystoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetCertificateAuthority": { + "methods": [ + "get_certificate_authority" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed new file mode 100644 index 000000000000..3e10cbb3572e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py new file mode 100644 index 000000000000..e2240b4bffb7 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
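The services subpackage whose diff continues below exposes a single synchronous client backed by a REST transport (see the transport registry later in client.py). A brief sketch of the two usual construction paths; the key-file path is hypothetical:

from google.cloud.memorystore_v1 import MemorystoreClient

# Default construction: Application Default Credentials, with the endpoint
# resolved by the client-options logic defined later in client.py.
client = MemorystoreClient()

# Construction from an explicit service-account key via the generated classmethod;
# "service-account.json" is a hypothetical path.
client = MemorystoreClient.from_service_account_file("service-account.json")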
+# +from .client import MemorystoreClient + +__all__ = ( + 'MemorystoreClient', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py new file mode 100644 index 000000000000..5dfc5b0d8992 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py @@ -0,0 +1,1636 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.memorystore_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1.services.memorystore import pagers +from google.cloud.memorystore_v1.types import memorystore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO +from .transports.rest import MemorystoreRestTransport + + +class MemorystoreClientMeta(type): + """Metaclass for the Memorystore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] + _transport_registry["rest"] = MemorystoreRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MemorystoreClient(metaclass=MemorystoreClientMeta):
+    """Service describing handlers for resources"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "memorystore.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "memorystore.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MemorystoreClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MemorystoreClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MemorystoreTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MemorystoreTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def certificate_authority_path(project: str,location: str,instance: str,) -> str:
+        """Returns a fully-qualified certificate_authority string."""
+        return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, )
+
+    @staticmethod
+    def parse_certificate_authority_path(path: str) -> Dict[str,str]:
+        """Parses a certificate_authority path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)/certificateAuthority$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str:
+        """Returns a fully-qualified forwarding_rule string."""
+        return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, )
+
+    @staticmethod
+    def parse_forwarding_rule_path(path: str) -> Dict[str,str]:
+        """Parses a forwarding_rule path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/forwardingRules/(?P<forwarding_rule>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def instance_path(project: str,location: str,instance: str,) -> str:
+        """Returns a fully-qualified instance string."""
+        return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, )
+
+    @staticmethod
+    def parse_instance_path(path: str) -> Dict[str,str]:
+        """Parses an instance path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def network_path(project: str,network: str,) -> str:
+        """Returns a fully-qualified network string."""
+        return "projects/{project}/global/networks/{network}".format(project=project, network=network, )
+
+    @staticmethod
+    def parse_network_path(path: str) -> Dict[str,str]:
+        """Parses a network path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def service_attachment_path(project: str,region: str,service_attachment: str,) -> str:
+        """Returns a fully-qualified service_attachment string."""
+        return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, )
+
+    @staticmethod
+    def parse_service_attachment_path(path: str) -> Dict[str,str]:
+        """Parses a service_attachment path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/serviceAttachments/(?P<service_attachment>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = MemorystoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MemorystoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the memorystore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MemorystoreTransport,Callable[..., MemorystoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MemorystoreTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MemorystoreTransport) + if transport_provided: + # transport is a MemorystoreTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MemorystoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + MemorystoreClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MemorystoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_instances(self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_list_instances(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.ListInstancesRequest, dict]): + The request object. Request message for [ListInstances][]. + parent (str): + Required. The parent to list + instances from. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1.services.memorystore.pagers.ListInstancesPager: + Response message for [ListInstances][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
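+        # Illustrative note (added commentary, not generated code): the flattened
+        # `parent` argument is a convenience, so these two calls are equivalent:
+        #   client.list_instances(parent="projects/my-project/locations/us-central1")
+        #   client.list_instances(request={"parent": "projects/my-project/locations/us-central1"})
+        # The resource name above is a made-up placeholder; mixing `request` with
+        # flattened arguments raises the ValueError below.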
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.ListInstancesRequest): + request = memorystore.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_get_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.GetInstanceRequest, dict]): + The request object. Request message for [GetInstance][]. + name (str): + Required. The name of the instance to + retrieve. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1.types.Instance: + A Memorystore instance. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.GetInstanceRequest): + request = memorystore.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance(self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_create_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.CreateInstanceRequest, dict]): + The request object. Request message for [CreateInstance][]. + parent (str): + Required. The parent resource where + this instance will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.memorystore_v1.types.Instance): + Required. The instance to create. 
+ This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The ID to use for the instance, which will + become the final component of the instance's resource + name. + + This value is subject to the following restrictions: + + - Must be 4-63 characters in length + - Must begin with a letter or digit + - Must contain only lowercase letters, digits, and + hyphens + - Must not end with a hyphen + - Must be unique within a location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1.types.Instance` A + Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.CreateInstanceRequest): + request = memorystore.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance(self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_update_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.UpdateInstanceRequest, dict]): + The request object. Request message for [UpdateInstance][]. + instance (google.cloud.memorystore_v1.types.Instance): + Required. The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be + updated on the instance. At least one + field must be specified. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1.types.Instance` A + Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.UpdateInstanceRequest): + request = memorystore.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
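+        # Added commentary (not generated code): `rpc` here is the method wrapped
+        # by `_prep_wrapped_methods`, so unless explicit `retry` or `timeout`
+        # values are passed, the transport defaults apply -- for this mutating
+        # call, a 600s timeout and no automatic retry.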
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance(self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_delete_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.DeleteInstanceRequest, dict]): + The request object. Request message for [DeleteInstance][]. + name (str): + Required. The name of the instance to + delete. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.DeleteInstanceRequest): + request = memorystore.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_certificate_authority(self, + request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: + r"""Gets details about the certificate authority for an + Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_get_certificate_authority(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.GetCertificateAuthorityRequest, dict]): + The request object. Request message for [GetCertificateAuthority][]. + name (str): + Required. The name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance}/certificateAuthority + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1.types.CertificateAuthority: + A certificate authority for an + instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, memorystore.GetCertificateAuthorityRequest): + request = memorystore.GetCertificateAuthorityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MemorystoreClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MemorystoreClient", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py new file mode 100644 index 000000000000..025c07bfda1e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.memorystore_v1.types import memorystore + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memorystore_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memorystore_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memorystore_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memorystore_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = memorystore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[memorystore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[memorystore.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst new file mode 100644 index 000000000000..1ed25b9404df --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MemorystoreTransport` is the ABC for all transports. +- public child `MemorystoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MemorystoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMemorystoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MemorystoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py new file mode 100644 index 000000000000..6172c94a25d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MemorystoreTransport +from .rest import MemorystoreRestTransport +from .rest import MemorystoreRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] +_transport_registry['rest'] = MemorystoreRestTransport + +__all__ = ( + 'MemorystoreTransport', + 'MemorystoreRestTransport', + 'MemorystoreRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py new file mode 100644 index 000000000000..216396a3b63e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.memorystore_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MemorystoreTransport(abc.ABC): + """Abstract transport class for Memorystore.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'memorystore.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'memorystore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
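# Condensed sketch of the credential resolution implemented in __init__ above,
# shown in isolation: explicit credentials win, then a credentials file, then
# Application Default Credentials. The service-account path is a placeholder.
from typing import Optional, Sequence, Tuple

import google.auth
from google.auth import credentials as ga_credentials


def resolve_credentials(
    credentials: Optional[ga_credentials.Credentials] = None,
    credentials_file: Optional[str] = None,
    scopes: Sequence[str] = ("https://www.googleapis.com/auth/cloud-platform",),
) -> Tuple[ga_credentials.Credentials, Optional[str]]:
    if credentials and credentials_file:
        raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")
    if credentials_file is not None:
        return google.auth.load_credentials_from_file(credentials_file, scopes=scopes)
    if credentials is None:
        return google.auth.default(scopes=scopes)
    return credentials, None


creds, project_id = resolve_credentials(credentials_file="service-account.json")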
+ self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_certificate_authority: gapic_v1.method.wrap_method( + self.get_certificate_authority, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
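# The defaults registered above (retry on ServiceUnavailable with a 60 s deadline
# for reads, a 600 s timeout for mutations) can be overridden per call. A sketch,
# assuming a configured MemorystoreClient; the resource name is a placeholder.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import memorystore_v1
from google.cloud.memorystore_v1.types import memorystore

client = memorystore_v1.MemorystoreClient()

custom_retry = retries.Retry(
    initial=0.5,
    maximum=5.0,
    multiplier=2.0,
    deadline=30.0,
    predicate=retries.if_exception_type(
        core_exceptions.ServiceUnavailable,
        core_exceptions.DeadlineExceeded,
    ),
)

instance = client.get_instance(
    request=memorystore.GetInstanceRequest(
        name="projects/my-project/locations/us-central1/instances/my-instance",
    ),
    retry=custom_retry,
    timeout=30.0,
)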
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [memorystore.GetInstanceRequest], + Union[ + memorystore.Instance, + Awaitable[memorystore.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_certificate_authority(self) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'MemorystoreTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py new file mode 100644 index 000000000000..d990c453e173 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py @@ -0,0 +1,1406 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.memorystore_v1.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseMemorystoreRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class MemorystoreRestInterceptor: + """Interceptor for Memorystore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MemorystoreRestTransport. + + .. 
code-block:: python + class MyCustomMemorystoreInterceptor(MemorystoreRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_certificate_authority(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_certificate_authority(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MemorystoreRestTransport(interceptor=MyCustomMemorystoreInterceptor()) + client = MemorystoreClient(transport=transport) + + + """ + def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_certificate_authority + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. 
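# A concrete interceptor built on the hooks described above: it attaches an extra
# metadata header to every ListInstances call and logs the size of each response.
# A sketch only; the header name is arbitrary and the client/transport wiring
# follows the pattern shown in the docstring above.
import logging

from google.cloud import memorystore_v1
from google.cloud.memorystore_v1.services.memorystore.transports import (
    MemorystoreRestInterceptor,
    MemorystoreRestTransport,
)


class TaggingInterceptor(MemorystoreRestInterceptor):
    def pre_list_instances(self, request, metadata):
        # metadata is a sequence of (key, value) pairs sent as HTTP headers.
        return request, tuple(metadata) + (("x-example-tag", "audit"),)

    def post_list_instances(self, response):
        logging.info("ListInstances returned %d instances", len(response.instances))
        return response


transport = MemorystoreRestTransport(interceptor=TaggingInterceptor())
client = memorystore_v1.MemorystoreClient(transport=transport)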
+ """ + return request, metadata + + def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + """Post-rpc interceptor for get_certificate_authority + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_instance(self, response: memorystore.Instance) -> memorystore.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MemorystoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MemorystoreRestInterceptor + + +class MemorystoreRestTransport(_BaseMemorystoreRestTransport): + """REST backend synchronous transport for Memorystore. 
+ + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'memorystore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'memorystore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MemorystoreRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
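# Sketch of wiring in mutual TLS through the client_cert_source_for_mtls hook
# accepted by the constructor above: the callable must return the client
# certificate and private key as a (cert_bytes, key_bytes) tuple. The PEM paths
# are placeholders, and the *.mtls.googleapis.com hostname follows the usual
# naming convention rather than anything defined in this file.
from google.cloud import memorystore_v1
from google.cloud.memorystore_v1.services.memorystore.transports import (
    MemorystoreRestTransport,
)


def client_cert_source():
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


transport = MemorystoreRestTransport(
    host="memorystore.mtls.googleapis.com",
    client_cert_source_for_mtls=client_cert_source,
)
client = memorystore_v1.MemorystoreClient(transport=transport)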
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: memorystore.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.memorystore.CreateInstanceRequest): + The request object. Request message for [CreateInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
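# CreateInstance returns a long-running operation that is polled through the
# OperationsRestTransport configured above. A client-level sketch, assuming the
# generated client wraps the raw Operation in google.api_core.operation.Operation;
# ids are placeholders and the Instance message is left for the caller to populate.
from google.cloud import memorystore_v1
from google.cloud.memorystore_v1.types import memorystore

client = memorystore_v1.MemorystoreClient(transport="rest")

operation = client.create_instance(
    request=memorystore.CreateInstanceRequest(
        parent="projects/my-project/locations/us-central1",
        instance_id="my-instance",
        instance=memorystore.Instance(),  # populate required instance fields here
    )
)

# Block until the instance is ready (or the timeout elapses).
instance = operation.result(timeout=600)
print("Created", instance.name)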
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.memorystore.DeleteInstanceRequest): + The request object. Request message for [DeleteInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetCertificateAuthority") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.GetCertificateAuthorityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.CertificateAuthority: + r"""Call the get certificate authority method over HTTP. + + Args: + request (~.memorystore.GetCertificateAuthorityRequest): + The request object. Request message for [GetCertificateAuthority][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.CertificateAuthority: + A certificate authority for an + instance. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.CertificateAuthority() + pb_resp = memorystore.CertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_certificate_authority(resp) + return resp + + class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.memorystore.GetInstanceRequest): + The request object. Request message for [GetInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.Instance: + A Memorystore instance. + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.Instance() + pb_resp = memorystore.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.memorystore.ListInstancesRequest): + The request object. Request message for [ListInstances][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.ListInstancesResponse: + Response message for [ListInstances][]. + """ + + http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
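# The handler above can also be reached directly through the transport's
# list_instances property, which returns raw ListInstancesResponse pages instead
# of a pager. A sketch assuming default credentials; the parent is a placeholder.
from google.cloud.memorystore_v1.services.memorystore.transports import (
    MemorystoreRestTransport,
)
from google.cloud.memorystore_v1.types import memorystore

transport = MemorystoreRestTransport()
request = memorystore.ListInstancesRequest(
    parent="projects/my-project/locations/us-central1",
)

while True:
    response = transport.list_instances(request)
    for instance in response.instances:
        print(instance.name)
    if not response.next_page_token:
        break
    request.page_token = response.next_page_token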
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.ListInstancesResponse() + pb_resp = memorystore.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: memorystore.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.memorystore.UpdateInstanceRequest): + The request object. Request message for [UpdateInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def create_instance(self) -> Callable[ + [memorystore.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [memorystore.DeleteInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_certificate_authority(self) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + memorystore.CertificateAuthority]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [memorystore.GetInstanceRequest], + memorystore.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [memorystore.ListInstancesRequest], + memorystore.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [memorystore.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
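# The operation and location handlers above back mixin methods on the generated
# client that take the raw protobuf request types. A sketch, assuming a configured
# client; the project, location and operation ids are placeholders.
from google.cloud import memorystore_v1
from google.cloud.location import locations_pb2
from google.longrunning import operations_pb2

client = memorystore_v1.MemorystoreClient(transport="rest")

# Enumerate the locations in which instances can be created.
locations = client.list_locations(
    locations_pb2.ListLocationsRequest(name="projects/my-project")
)
for location in locations.locations:
    print(location.location_id)

# Inspect a long-running operation and cancel it if it is still running.
op = client.get_operation(
    operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/us-central1/operations/operation-123"
    )
)
if not op.done:
    client.cancel_operation(operations_pb2.CancelOperationRequest(name=op.name))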
+ """ + + http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MemorystoreRestTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py new file mode 100644 index 000000000000..3155d66c201d --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.memorystore_v1.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseMemorystoreRestTransport(MemorystoreTransport): + """Base REST backend transport for Memorystore. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'memorystore.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ Args: + host (Optional[str]): + The hostname to connect to (default: 'memorystore.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.DeleteInstanceRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetCertificateAuthority: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetCertificateAuthorityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.ListInstancesRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = 
[{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseMemorystoreRestTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py new file mode 100644 index 000000000000..fc13543f2db2 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
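Each of the per-RPC base classes in this file funnels through google.api_core.path_template.transcode, which matches the request against the declared http_options and splits it into an HTTP method, an expanded URI, an optional body, and leftover query parameters. A rough illustration of that step in isolation, assuming google-api-core is installed and using a hypothetical instance name (any field not bound into the URI would end up in query_params):

from google.api_core import path_template

# Mirrors the entry declared in _BaseGetInstance above.
http_options = [{
    'method': 'get',
    'uri': '/v1/{name=projects/*/locations/*/instances/*}',
}]

transcoded = path_template.transcode(
    http_options,
    name='projects/my-project/locations/us-central1/instances/my-instance',
)

print(transcoded['method'])  # get
print(transcoded['uri'])     # /v1/projects/my-project/locations/us-central1/instances/my-instance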
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .memorystore import ( + CertificateAuthority, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + UpdateInstanceRequest, + ZoneDistributionConfig, + ConnectionType, + PscConnectionStatus, +) + +__all__ = ( + 'CertificateAuthority', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'DiscoveryEndpoint', + 'GetCertificateAuthorityRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'NodeConfig', + 'OperationMetadata', + 'PersistenceConfig', + 'PscAutoConnection', + 'PscConnection', + 'UpdateInstanceRequest', + 'ZoneDistributionConfig', + 'ConnectionType', + 'PscConnectionStatus', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py new file mode 100644 index 000000000000..ed348f00d027 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py @@ -0,0 +1,1225 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.memorystore.v1', + manifest={ + 'PscConnectionStatus', + 'ConnectionType', + 'Instance', + 'PscAutoConnection', + 'PscConnection', + 'DiscoveryEndpoint', + 'PersistenceConfig', + 'NodeConfig', + 'ZoneDistributionConfig', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'UpdateInstanceRequest', + 'DeleteInstanceRequest', + 'GetCertificateAuthorityRequest', + 'CertificateAuthority', + 'OperationMetadata', + }, +) + + +class PscConnectionStatus(proto.Enum): + r"""Status of the PSC connection. + + Values: + PSC_CONNECTION_STATUS_UNSPECIFIED (0): + PSC connection status is not specified. 
+ ACTIVE (1): + The connection is active + NOT_FOUND (2): + Connection not found + """ + PSC_CONNECTION_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + NOT_FOUND = 2 + + +class ConnectionType(proto.Enum): + r"""Type of a PSC connection + + Values: + CONNECTION_TYPE_UNSPECIFIED (0): + Connection Type is not set + CONNECTION_TYPE_DISCOVERY (1): + Connection that will be used for topology + discovery. + CONNECTION_TYPE_PRIMARY (2): + Connection that will be used as primary + endpoint to access primary. + CONNECTION_TYPE_READER (3): + Connection that will be used as reader + endpoint to access replicas. + """ + CONNECTION_TYPE_UNSPECIFIED = 0 + CONNECTION_TYPE_DISCOVERY = 1 + CONNECTION_TYPE_PRIMARY = 2 + CONNECTION_TYPE_READER = 3 + + +class Instance(proto.Message): + r"""A Memorystore instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. Unique name of the instance. + Format: + projects/{project}/locations/{location}/instances/{instance} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation timestamp of the + instance. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Latest update timestamp of the + instance. + labels (MutableMapping[str, str]): + Optional. Labels to represent user-provided + metadata. + state (google.cloud.memorystore_v1.types.Instance.State): + Output only. Current state of the instance. + state_info (google.cloud.memorystore_v1.types.Instance.StateInfo): + Output only. Additional information about the + state of the instance. + uid (str): + Output only. System assigned, unique + identifier for the instance. + replica_count (int): + Optional. Number of replica nodes per shard. + If omitted the default is 0 replicas. + + This field is a member of `oneof`_ ``_replica_count``. + authorization_mode (google.cloud.memorystore_v1.types.Instance.AuthorizationMode): + Optional. Immutable. Authorization mode of + the instance. + transit_encryption_mode (google.cloud.memorystore_v1.types.Instance.TransitEncryptionMode): + Optional. Immutable. In-transit encryption + mode of the instance. + shard_count (int): + Optional. Number of shards for the instance. + discovery_endpoints (MutableSequence[google.cloud.memorystore_v1.types.DiscoveryEndpoint]): + Output only. Endpoints clients can connect to + the instance through. Currently only one + discovery endpoint is supported. + node_type (google.cloud.memorystore_v1.types.Instance.NodeType): + Optional. Immutable. Machine type for + individual nodes of the instance. + persistence_config (google.cloud.memorystore_v1.types.PersistenceConfig): + Optional. Persistence configuration of the + instance. + engine_version (str): + Optional. Immutable. Engine version of the + instance. + engine_configs (MutableMapping[str, str]): + Optional. User-provided engine configurations + for the instance. + node_config (google.cloud.memorystore_v1.types.NodeConfig): + Output only. Configuration of individual + nodes of the instance. + zone_distribution_config (google.cloud.memorystore_v1.types.ZoneDistributionConfig): + Optional. Immutable. Zone distribution + configuration of the instance for node + allocation. + deletion_protection_enabled (bool): + Optional. If set to true deletion of the + instance will fail. + + This field is a member of `oneof`_ ``_deletion_protection_enabled``. + psc_auto_connections (MutableSequence[google.cloud.memorystore_v1.types.PscAutoConnection]): + Required. Immutable. 
User inputs and resource + details of the auto-created PSC connections. + endpoints (MutableSequence[google.cloud.memorystore_v1.types.Instance.InstanceEndpoint]): + Optional. Endpoints for the instance. + mode (google.cloud.memorystore_v1.types.Instance.Mode): + Optional. The mode config for the instance. + """ + class State(proto.Enum): + r"""Possible states of the instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Instance is being created. + ACTIVE (2): + Instance has been created and is usable. + UPDATING (3): + Instance is being updated. + DELETING (4): + Instance is being deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + UPDATING = 3 + DELETING = 4 + + class AuthorizationMode(proto.Enum): + r"""Possible authorization modes of the instance. + + Values: + AUTHORIZATION_MODE_UNSPECIFIED (0): + Not set. + AUTH_DISABLED (1): + Authorization disabled. + IAM_AUTH (2): + IAM basic authorization. + """ + AUTHORIZATION_MODE_UNSPECIFIED = 0 + AUTH_DISABLED = 1 + IAM_AUTH = 2 + + class TransitEncryptionMode(proto.Enum): + r"""Possible in-transit encryption modes of the instance. + + Values: + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED (0): + Not set. + TRANSIT_ENCRYPTION_DISABLED (1): + In-transit encryption is disabled. + SERVER_AUTHENTICATION (2): + Server-managed encryption is used for + in-transit encryption. + """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 + TRANSIT_ENCRYPTION_DISABLED = 1 + SERVER_AUTHENTICATION = 2 + + class NodeType(proto.Enum): + r"""Possible node types of the instance. See + https://cloud.google.com/memorystore/docs/valkey/instance-node-specification + for more information. + + Values: + NODE_TYPE_UNSPECIFIED (0): + Not set. + SHARED_CORE_NANO (1): + Shared core nano. + HIGHMEM_MEDIUM (2): + High memory medium. + HIGHMEM_XLARGE (3): + High memory extra large. + STANDARD_SMALL (4): + Standard small. + """ + NODE_TYPE_UNSPECIFIED = 0 + SHARED_CORE_NANO = 1 + HIGHMEM_MEDIUM = 2 + HIGHMEM_XLARGE = 3 + STANDARD_SMALL = 4 + + class Mode(proto.Enum): + r"""The mode config, which is used to enable/disable cluster + mode. + + Values: + MODE_UNSPECIFIED (0): + Mode is not specified. + STANDALONE (1): + Instance is in standalone mode. + CLUSTER (2): + Instance is in cluster mode. + """ + MODE_UNSPECIFIED = 0 + STANDALONE = 1 + CLUSTER = 2 + + class StateInfo(proto.Message): + r"""Additional information about the state of the instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + update_info (google.cloud.memorystore_v1.types.Instance.StateInfo.UpdateInfo): + Output only. Describes ongoing update when + instance state is UPDATING. + + This field is a member of `oneof`_ ``info``. + """ + + class UpdateInfo(proto.Message): + r"""Represents information about instance with state UPDATING. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_shard_count (int): + Output only. Target number of shards for the + instance. + + This field is a member of `oneof`_ ``_target_shard_count``. + target_replica_count (int): + Output only. Target number of replica nodes + per shard for the instance. + + This field is a member of `oneof`_ ``_target_replica_count``. 
+ """ + + target_shard_count: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + target_replica_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + proto.MESSAGE, + number=1, + oneof='info', + message='Instance.StateInfo.UpdateInfo', + ) + + class InstanceEndpoint(proto.Message): + r"""InstanceEndpoint consists of PSC connections that are created + as a group in each VPC network for accessing the instance. In + each group, there shall be one connection for each service + attachment in the cluster. + + Attributes: + connections (MutableSequence[google.cloud.memorystore_v1.types.Instance.ConnectionDetail]): + Optional. A group of PSC connections. They + are created in the same VPC network, one for + each service attachment in the cluster. + """ + + connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance.ConnectionDetail', + ) + + class ConnectionDetail(proto.Message): + r"""Information of each PSC connection. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + psc_auto_connection (google.cloud.memorystore_v1.types.PscAutoConnection): + Detailed information of a PSC connection that + is created through service connectivity + automation. + + This field is a member of `oneof`_ ``connection``. + psc_connection (google.cloud.memorystore_v1.types.PscConnection): + Detailed information of a PSC connection that + is created by the user. + + This field is a member of `oneof`_ ``connection``. 
+ """ + + psc_auto_connection: 'PscAutoConnection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='connection', + message='PscAutoConnection', + ) + psc_connection: 'PscConnection' = proto.Field( + proto.MESSAGE, + number=2, + oneof='connection', + message='PscConnection', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + state_info: StateInfo = proto.Field( + proto.MESSAGE, + number=6, + message=StateInfo, + ) + uid: str = proto.Field( + proto.STRING, + number=7, + ) + replica_count: int = proto.Field( + proto.INT32, + number=8, + optional=True, + ) + authorization_mode: AuthorizationMode = proto.Field( + proto.ENUM, + number=9, + enum=AuthorizationMode, + ) + transit_encryption_mode: TransitEncryptionMode = proto.Field( + proto.ENUM, + number=10, + enum=TransitEncryptionMode, + ) + shard_count: int = proto.Field( + proto.INT32, + number=11, + ) + discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='DiscoveryEndpoint', + ) + node_type: NodeType = proto.Field( + proto.ENUM, + number=13, + enum=NodeType, + ) + persistence_config: 'PersistenceConfig' = proto.Field( + proto.MESSAGE, + number=14, + message='PersistenceConfig', + ) + engine_version: str = proto.Field( + proto.STRING, + number=15, + ) + engine_configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + node_config: 'NodeConfig' = proto.Field( + proto.MESSAGE, + number=17, + message='NodeConfig', + ) + zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + proto.MESSAGE, + number=18, + message='ZoneDistributionConfig', + ) + deletion_protection_enabled: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message='PscAutoConnection', + ) + endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=InstanceEndpoint, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=26, + enum=Mode, + ) + + +class PscAutoConnection(proto.Message): + r"""Details of consumer resources in a PSC connection. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + port (int): + Optional. Output only. port will only be set + for Primary/Reader or Discovery endpoint. + + This field is a member of `oneof`_ ``ports``. + psc_connection_id (str): + Output only. The PSC connection id of the + forwarding rule connected to the service + attachment. + ip_address (str): + Output only. The IP allocated on the consumer + network for the PSC forwarding rule. + forwarding_rule (str): + Output only. The URI of the consumer side forwarding rule. + Format: + projects/{project}/regions/{region}/forwardingRules/{forwarding_rule} + project_id (str): + Required. The consumer project_id where PSC connections are + established. This should be the same project_id that the + instance is being created in. + network (str): + Required. 
The network where the PSC endpoints are created, + in the form of + projects/{project_id}/global/networks/{network_id}. + service_attachment (str): + Output only. The service attachment which is + the target of the PSC connection, in the form of + projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}. + psc_connection_status (google.cloud.memorystore_v1.types.PscConnectionStatus): + Output only. The status of the PSC + connection: whether a connection exists and + ACTIVE or it no longer exists. Please note that + this value is updated periodically. Please use + Private Service Connect APIs for the latest + status. + connection_type (google.cloud.memorystore_v1.types.ConnectionType): + Output only. Type of the PSC connection. + """ + + port: int = proto.Field( + proto.INT32, + number=9, + oneof='ports', + ) + psc_connection_id: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=6, + ) + psc_connection_status: 'PscConnectionStatus' = proto.Field( + proto.ENUM, + number=7, + enum='PscConnectionStatus', + ) + connection_type: 'ConnectionType' = proto.Field( + proto.ENUM, + number=8, + enum='ConnectionType', + ) + + +class PscConnection(proto.Message): + r"""User created Psc connection configuration. + + Attributes: + psc_connection_id (str): + Output only. The PSC connection id of the + forwarding rule connected to the service + attachment. + ip_address (str): + Required. The IP allocated on the consumer + network for the PSC forwarding rule. + forwarding_rule (str): + Required. The URI of the consumer side forwarding rule. + Format: + projects/{project}/regions/{region}/forwardingRules/{forwarding_rule} + project_id (str): + Output only. The consumer project_id where the forwarding + rule is created from. + network (str): + Required. The consumer network where the IP address resides, + in the form of + projects/{project_id}/global/networks/{network_id}. + service_attachment (str): + Required. The service attachment which is the + target of the PSC connection, in the form of + projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}. + psc_connection_status (google.cloud.memorystore_v1.types.PscConnectionStatus): + Output only. The status of the PSC + connection: whether a connection exists and + ACTIVE or it no longer exists. Please note that + this value is updated periodically. Please use + Private Service Connect APIs for the latest + status. + connection_type (google.cloud.memorystore_v1.types.ConnectionType): + Output only. Type of the PSC connection. 
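Taken together, Instance, PscAutoConnection, and PscConnection describe how an instance and its Private Service Connect wiring are modelled. As a rough sketch of building an Instance with one auto-created PSC connection, assuming the types are re-exported at the memorystore_v1 package level as the generated samples later in this change suggest (all field values are placeholders; validation happens server-side):

from google.cloud import memorystore_v1

# project_id and network are the fields documented as Required above;
# the values here are hypothetical.
psc = memorystore_v1.PscAutoConnection(
    project_id="my-project",
    network="projects/my-project/global/networks/my-network",
)

instance = memorystore_v1.Instance(
    shard_count=3,
    replica_count=1,
    mode=memorystore_v1.Instance.Mode.CLUSTER,
    psc_auto_connections=[psc],
)

print(instance.mode)                        # Mode.CLUSTER
print(len(instance.psc_auto_connections))   # 1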
+ """ + + psc_connection_id: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=6, + ) + psc_connection_status: 'PscConnectionStatus' = proto.Field( + proto.ENUM, + number=7, + enum='PscConnectionStatus', + ) + connection_type: 'ConnectionType' = proto.Field( + proto.ENUM, + number=8, + enum='ConnectionType', + ) + + +class DiscoveryEndpoint(proto.Message): + r"""Represents an endpoint for clients to connect to the + instance. + + Attributes: + address (str): + Output only. IP address of the exposed + endpoint clients connect to. + port (int): + Output only. The port number of the exposed + endpoint. + network (str): + Output only. The network where the IP address of the + discovery endpoint will be reserved, in the form of + projects/{network_project}/global/networks/{network_id}. + """ + + address: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + network: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PersistenceConfig(proto.Message): + r"""Represents persistence configuration for a instance. + + Attributes: + mode (google.cloud.memorystore_v1.types.PersistenceConfig.PersistenceMode): + Optional. Current persistence mode. + rdb_config (google.cloud.memorystore_v1.types.PersistenceConfig.RDBConfig): + Optional. RDB configuration. This field will + be ignored if mode is not RDB. + aof_config (google.cloud.memorystore_v1.types.PersistenceConfig.AOFConfig): + Optional. AOF configuration. This field will + be ignored if mode is not AOF. + """ + class PersistenceMode(proto.Enum): + r"""Possible persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled, and any snapshot + data is deleted. + RDB (2): + RDB based persistence is enabled. + AOF (3): + AOF based persistence is enabled. + """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + AOF = 3 + + class RDBConfig(proto.Message): + r"""Configuration for RDB based persistence. + + Attributes: + rdb_snapshot_period (google.cloud.memorystore_v1.types.PersistenceConfig.RDBConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time that the first snapshot + was/will be attempted, and to which future + snapshots will be aligned. If not provided, the + current time will be used. + """ + class SnapshotPeriod(proto.Enum): + r"""Possible snapshot periods. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (1): + One hour. + SIX_HOURS (2): + Six hours. + TWELVE_HOURS (3): + Twelve hours. + TWENTY_FOUR_HOURS (4): + Twenty four hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 1 + SIX_HOURS = 2 + TWELVE_HOURS = 3 + TWENTY_FOUR_HOURS = 4 + + rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + proto.ENUM, + number=1, + enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class AOFConfig(proto.Message): + r"""Configuration for AOF based persistence. 
+ + Attributes: + append_fsync (google.cloud.memorystore_v1.types.PersistenceConfig.AOFConfig.AppendFsync): + Optional. The fsync mode. + """ + class AppendFsync(proto.Enum): + r"""Possible fsync modes. + + Values: + APPEND_FSYNC_UNSPECIFIED (0): + Not set. Default: EVERY_SEC + NEVER (1): + Never fsync. Normally Linux will flush data + every 30 seconds with this configuration, but + it's up to the kernel's exact tuning. + EVERY_SEC (2): + Fsync every second. You may lose 1 second of + data if there is a disaster. + ALWAYS (3): + Fsync every time new write commands are + appended to the AOF. The best data loss + protection at the cost of performance. + """ + APPEND_FSYNC_UNSPECIFIED = 0 + NEVER = 1 + EVERY_SEC = 2 + ALWAYS = 3 + + append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + proto.ENUM, + number=1, + enum='PersistenceConfig.AOFConfig.AppendFsync', + ) + + mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_config: RDBConfig = proto.Field( + proto.MESSAGE, + number=2, + message=RDBConfig, + ) + aof_config: AOFConfig = proto.Field( + proto.MESSAGE, + number=3, + message=AOFConfig, + ) + + +class NodeConfig(proto.Message): + r"""Represents configuration for nodes of the instance. + + Attributes: + size_gb (float): + Output only. Memory size in GB of the node. + """ + + size_gb: float = proto.Field( + proto.DOUBLE, + number=1, + ) + + +class ZoneDistributionConfig(proto.Message): + r"""Zone distribution configuration for allocation of instance + resources. + + Attributes: + zone (str): + Optional. Defines zone where all resources will be allocated + with SINGLE_ZONE mode. Ignored for MULTI_ZONE mode. + mode (google.cloud.memorystore_v1.types.ZoneDistributionConfig.ZoneDistributionMode): + Optional. Current zone distribution mode. Defaults to + MULTI_ZONE. + """ + class ZoneDistributionMode(proto.Enum): + r"""Possible zone distribution modes. + + Values: + ZONE_DISTRIBUTION_MODE_UNSPECIFIED (0): + Not Set. Default: MULTI_ZONE + MULTI_ZONE (1): + Distribute resources across 3 zones picked at + random within the region. + SINGLE_ZONE (2): + Provision resources in a single zone. Zone + field must be specified. + """ + ZONE_DISTRIBUTION_MODE_UNSPECIFIED = 0 + MULTI_ZONE = 1 + SINGLE_ZONE = 2 + + zone: str = proto.Field( + proto.STRING, + number=2, + ) + mode: ZoneDistributionMode = proto.Field( + proto.ENUM, + number=1, + enum=ZoneDistributionMode, + ) + + +class ListInstancesRequest(proto.Message): + r"""Request message for [ListInstances][]. + + Attributes: + parent (str): + Required. The parent to list instances from. + Format: projects/{project}/locations/{location} + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Expression for filtering results. + order_by (str): + Optional. Sort results by a defined order. Supported values: + "name", "create_time". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response message for [ListInstances][]. 
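A brief sketch of how the persistence and zone-distribution settings defined above might be attached to an instance, again assuming top-level re-exports from memorystore_v1; the zone value is a placeholder:

from google.cloud import memorystore_v1

persistence = memorystore_v1.PersistenceConfig(
    mode=memorystore_v1.PersistenceConfig.PersistenceMode.RDB,
    rdb_config=memorystore_v1.PersistenceConfig.RDBConfig(
        rdb_snapshot_period=memorystore_v1.PersistenceConfig.RDBConfig.SnapshotPeriod.TWENTY_FOUR_HOURS,
    ),
)

zone_distribution = memorystore_v1.ZoneDistributionConfig(
    mode=memorystore_v1.ZoneDistributionConfig.ZoneDistributionMode.SINGLE_ZONE,
    zone="us-central1-a",  # hypothetical zone; required when SINGLE_ZONE is used
)

instance = memorystore_v1.Instance(
    persistence_config=persistence,
    zone_distribution_config=zone_distribution,
)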
+ + Attributes: + instances (MutableSequence[google.cloud.memorystore_v1.types.Instance]): + If the {location} requested was "-" the + response contains a list of instances from all + locations. Instances in unreachable locations + will be omitted. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Request message for [GetInstance][]. + + Attributes: + name (str): + Required. The name of the instance to + retrieve. Format: + projects/{project}/locations/{location}/instances/{instance} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Request message for [CreateInstance][]. + + Attributes: + parent (str): + Required. The parent resource where this + instance will be created. Format: + projects/{project}/locations/{location} + instance_id (str): + Required. The ID to use for the instance, which will become + the final component of the instance's resource name. + + This value is subject to the following restrictions: + + - Must be 4-63 characters in length + - Must begin with a letter or digit + - Must contain only lowercase letters, digits, and hyphens + - Must not end with a hyphen + - Must be unique within a location + instance (google.cloud.memorystore_v1.types.Instance): + Required. The instance to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=3, + message='Instance', + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Request message for [UpdateInstance][]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be updated on + the instance. At least one field must be + specified. + instance (google.cloud.memorystore_v1.types.Instance): + Required. The instance to update. + request_id (str): + Optional. An optional request ID to identify + requests. 
Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=2, + message='Instance', + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Request message for [DeleteInstance][]. + + Attributes: + name (str): + Required. The name of the instance to delete. + Format: + projects/{project}/locations/{location}/instances/{instance} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCertificateAuthorityRequest(proto.Message): + r"""Request message for [GetCertificateAuthority][]. + + Attributes: + name (str): + Required. The name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance}/certificateAuthority + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CertificateAuthority(proto.Message): + r"""A certificate authority for an instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_server_ca (google.cloud.memorystore_v1.types.CertificateAuthority.ManagedCertificateAuthority): + A managed server certificate authority. + + This field is a member of `oneof`_ ``server_ca``. + name (str): + Identifier. Unique name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance} + """ + + class ManagedCertificateAuthority(proto.Message): + r"""A managed certificate authority. + + Attributes: + ca_certs (MutableSequence[google.cloud.memorystore_v1.types.CertificateAuthority.ManagedCertificateAuthority.CertChain]): + PEM encoded CA certificate chains for managed + server authentication. + """ + + class CertChain(proto.Message): + r"""A certificate chain. 
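The request_id semantics described above apply uniformly to the mutating RPCs (create, update, delete). A short sketch of supplying one explicitly; the resource name is a placeholder, and the request would be passed to MemorystoreClient.delete_instance as in the generated samples further down:

import uuid

from google.cloud import memorystore_v1

# A fresh UUID makes retries of the same logical request safe to replay
# within the documented 60-minute deduplication window.
request = memorystore_v1.DeleteInstanceRequest(
    name="projects/my-project/locations/us-central1/instances/my-instance",
    request_id=str(uuid.uuid4()),
)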
+ + Attributes: + certificates (MutableSequence[str]): + The certificates that form the CA chain in + order of leaf to root. + """ + + certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + ) + + managed_server_ca: ManagedCertificateAuthority = proto.Field( + proto.MESSAGE, + number=2, + oneof='server_ca', + message=ManagedCertificateAuthority, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/mypy.ini b/owl-bot-staging/google-cloud-memorystore/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py new file mode 100644 index 000000000000..f0629be85ae7 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-memorystore' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/memorystore_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/memorystore_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
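The prerelease_deps session defined earlier in this noxfile derives the list of pinned packages from the constraints file with a small regex; its behaviour is easy to check in isolation (the constraints content below is made up for illustration):

import re

constraints_text = """\
# hypothetical lower-bound pins
google-api-core==1.34.1
proto-plus==1.22.3
"""

# Same pattern as in prerelease_deps: capture the package name only when the
# line pins an exact version with '=='; comment lines never match.
constraints_deps = [
    match.group(1)
    for match in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]

print(constraints_deps)  # ['google-api-core', 'proto-plus']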
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py new file mode 100644 index 000000000000..40b633c4d102 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_create_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_CreateInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py new file mode 100644 index 000000000000..45abcade82bf --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
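+# - client.delete_instance returns a long-running operation; the
+#   operation.result() call further below blocks until the operation
+#   finishes and raises an exception if it fails.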
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_delete_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_DeleteInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py new file mode 100644 index 000000000000..f89c696f7a29 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_GetCertificateAuthority_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_get_certificate_authority(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_GetCertificateAuthority_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py new file mode 100644 index 000000000000..cd66a73b6524 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_get_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_GetInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py new file mode 100644 index 000000000000..87e2b659aeef --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
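+# - list_instances returns a pager that transparently fetches further
+#   pages as the loop below iterates; page_size, page_token, filter and
+#   order_by can also be set on ListInstancesRequest to control paging
+#   and ordering.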
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_list_instances(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memorystore_v1_generated_Memorystore_ListInstances_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py new file mode 100644 index 000000000000..6abf253cc26f --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
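+# - The request below omits update_mask; to restrict which fields are
+#   changed, an update_mask (google.protobuf.field_mask_pb2.FieldMask)
+#   can be supplied, e.g. FieldMask(paths=["labels"]), where the "labels"
+#   path is illustrative only.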
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_update_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_UpdateInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json new file mode 100644 index 000000000000..6e67c4d3be43 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json @@ -0,0 +1,507 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.memorystore.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-memorystore", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.create_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.CreateInstance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.memorystore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "memorystore_v1_generated_memorystore_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_CreateInstance_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": 
"google.cloud.memorystore_v1.MemorystoreClient.delete_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.DeleteInstance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "memorystore_v1_generated_memorystore_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.get_certificate_authority", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.GetCertificateAuthority", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.GetCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1.types.CertificateAuthority", + "shortName": "get_certificate_authority" + }, + "description": "Sample for GetCertificateAuthority", + "file": "memorystore_v1_generated_memorystore_get_certificate_authority_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_GetCertificateAuthority_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_get_certificate_authority_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.get_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.GetInstance", + "service": { + 
"fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "memorystore_v1_generated_memorystore_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.list_instances", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.ListInstances", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1.services.memorystore.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "memorystore_v1_generated_memorystore_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.update_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.UpdateInstance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.UpdateInstanceRequest" + }, + { + 
"name": "instance", + "type": "google.cloud.memorystore_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "memorystore_v1_generated_memorystore_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_UpdateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_update_instance_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py b/owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py new file mode 100644 index 000000000000..f79de1f612d8 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class memorystoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), + 'delete_instance': ('name', 'request_id', ), + 'get_certificate_authority': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('instance', 'update_mask', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=memorystoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the memorystore client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
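+
+Example invocation (the directory names are placeholders):
+    python fixup_memorystore_v1_keywords.py -d original/src -o fixed/src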
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/setup.py b/owl-bot-staging/google-cloud-memorystore/v1/setup.py new file mode 100644 index 000000000000..3cb03e5a17ca --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-memorystore' + + +description = "Google Cloud Memorystore API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
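+# Unlike constraints-3.7.txt, which pins every dependency to its lower
+# bound, this file leaves versions unpinned so the newest compatible
+# releases are installed for the unit tests.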
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py new file mode 100644 index 000000000000..75206a8bb252 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py @@ -0,0 +1,3401 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.memorystore_v1.services.memorystore import MemorystoreClient +from google.cloud.memorystore_v1.services.memorystore import pagers +from google.cloud.memorystore_v1.services.memorystore import transports +from google.cloud.memorystore_v1.types import memorystore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. 
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MemorystoreClient._get_default_mtls_endpoint(None) is None + assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MemorystoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + MemorystoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MemorystoreClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MemorystoreClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MemorystoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert 
MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MemorystoreClient._get_client_cert_source(None, False) is None + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MemorystoreClient._DEFAULT_UNIVERSE + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + MemorystoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
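+
+
+# The parametrized tests below exercise the REST transport, which is the
+# only transport generated for this client; they model client construction
+# along the lines of the following (illustrative only):
+#
+#     client = MemorystoreClient(
+#         credentials=ga_credentials.AnonymousCredentials(),
+#         transport="rest",
+#     )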
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (MemorystoreClient, "rest"), +]) +def test_memorystore_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://memorystore.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MemorystoreRestTransport, "rest"), +]) +def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MemorystoreClient, "rest"), +]) +def test_memorystore_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://memorystore.googleapis.com' + ) + + +def test_memorystore_client_get_transport_class(): + transport = MemorystoreClient.get_transport_class() + available_transports = [ + transports.MemorystoreRestTransport, + ] + assert transport in available_transports + + transport = MemorystoreClient.get_transport_class("rest") + assert transport == transports.MemorystoreRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test_memorystore_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MemorystoreClient +]) +@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + MemorystoreClient +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test_memorystore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MemorystoreClient._DEFAULT_UNIVERSE + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), +]) +def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), +]) +def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instances_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + memorystore.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + memorystore.Instance(), + memorystore.Instance(), + ], + next_page_token='abc', + ), + memorystore.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + ], + next_page_token='ghi', + ), + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + memorystore.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, memorystore.Instance) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + memorystore.GetInstanceRequest(), + name='name_value', + ) + + +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("instance_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + + +def test_create_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance=memorystore.Instance(name='name_value'), + instance_id='instance_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_create_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + memorystore.CreateInstanceRequest(), + parent='parent_value', + instance=memorystore.Instance(name='name_value'), + instance_id='instance_id_value', + ) + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + + +def test_update_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + instance=memorystore.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_update_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + memorystore.UpdateInstanceRequest(), + instance=memorystore.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + + +def test_delete_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + memorystore.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_certificate_authority in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + + request = {} + client.get_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_certificate_authority(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_certificate_authority_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_certificate_authority_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_certificate_authority(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + + +def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_certificate_authority( + memorystore.GetCertificateAuthorityRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MemorystoreClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.MemorystoreRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_rest(): + transport = MemorystoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_instances(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + req.return_value.content = return_value + + request = memorystore.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.ListInstancesResponse() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.Instance( + name='name_value', + state=memorystore.Instance.State.CREATING, + uid='uid_value', + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version='engine_version_value', + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, memorystore.Instance) + assert response.name == 'name_value' + assert response.state == memorystore.Instance.State.CREATING + assert response.uid == 'uid_value' + assert response.replica_count == 1384 + assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert response.shard_count == 1178 + assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO + assert response.engine_version == 'engine_version_value' + assert response.deletion_protection_enabled is True + assert response.mode == memorystore.Instance.Mode.STANDALONE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.Instance.to_json(memorystore.Instance()) + req.return_value.content = return_value + + request = memorystore.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.Instance() + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
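The interceptor tests above only verify that the `pre_*`/`post_*` hooks fire; they do not show how an application would install one. A hedged sketch of that, reusing only names that appear in this patch; the hook signatures mirror what the tests mock and may differ in detail from the generated base class:

    from google.auth import credentials as ga_credentials
    from google.cloud import memorystore_v1
    from google.cloud.memorystore_v1.services.memorystore import transports


    class LoggingInterceptor(transports.MemorystoreRestInterceptor):
        def pre_get_instance(self, request, metadata):
            # Runs before the HTTP request is built; may rewrite request/metadata.
            print("GetInstance ->", request.name)
            return request, metadata

        def post_get_instance(self, response):
            # Runs after the response has been deserialized.
            print("GetInstance <-", response.state)
            return response


    transport = transports.MemorystoreRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = memorystore_v1.MemorystoreClient(transport=transport)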
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
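The pruning loop above exists because the sample request dict was generated against one version of the protos while the test may run against another, so any subfield the runtime message no longer defines has to be dropped before the request is built. Reduced to its essence, and leaving out the repeated-field handling, the idea looks like this (a simplified, hedged restatement, not the generated helper itself):

    def prune_unknown_fields(sample, runtime_fields):
        """Drop keys from a sample request dict that the runtime proto does not define."""
        return {key: value for key, value in sample.items() if key in runtime_fields}


    sample = {"name": "projects/p/locations/l/instances/i", "field_added_later": 42}
    assert prune_unknown_fields(sample, {"name", "shard_count"}) == {
        "name": "projects/p/locations/l/instances/i"
    }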
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
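`create_instance`, `update_instance`, and `delete_instance` are long-running operations, which is why their success tests fake an `operations_pb2.Operation` payload rather than an `Instance`. A hedged sketch of the application-side call, with placeholder project and ID values; the flattened arguments are assumed from the standard GAPIC Create pattern, and a real project plus credentials would be needed to run it:

    from google.cloud import memorystore_v1

    client = memorystore_v1.MemorystoreClient(transport="rest")
    operation = client.create_instance(
        parent="projects/my-project/locations/us-central1",
        instance=memorystore_v1.Instance(shard_count=1),
        instance_id="my-instance",
    )
    # Blocks until the service reports the operation done, then yields the Instance.
    instance = operation.result()
    print(instance.name, instance.state)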
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.DeleteInstanceRequest, + dict, +]) +def test_delete_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_certificate_authority(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.GetCertificateAuthorityRequest, + dict, +]) +def test_get_certificate_authority_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_certificate_authority(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, memorystore.CertificateAuthority) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_certificate_authority_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + req.return_value.content = return_value + + request = memorystore.GetCertificateAuthorityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.CertificateAuthority() + + client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
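The surrounding tests exercise the operations mixins (get, list, cancel, and delete operation) that ship with the client. A hedged sketch of reaching those mixins from user code, with a placeholder operation name; the `request=` call shape is an assumption based on the standard mixin signature, and real credentials would be required:

    from google.cloud import memorystore_v1
    from google.longrunning import operations_pb2

    client = memorystore_v1.MemorystoreClient(transport="rest")
    op = client.get_operation(
        request=operations_pb2.GetOperationRequest(
            name="projects/my-project/locations/us-central1/operations/operation-123"
        )
    )
    print(op.done)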
+ with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_certificate_authority_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_certificate_authority), + '__call__') as call: + client.get_certificate_authority(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetCertificateAuthorityRequest() + + assert args[0] == request_msg + + +def test_memorystore_rest_lro_client(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_memorystore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MemorystoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_memorystore_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MemorystoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_instances', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'get_certificate_authority', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_memorystore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MemorystoreTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_memorystore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MemorystoreTransport() + adc.assert_called_once() + + +def test_memorystore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
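The two tests above pin down the default auth behaviour: the transport requests the `https://www.googleapis.com/auth/cloud-platform` scope and falls back to Application Default Credentials when nothing is supplied. If a service-account key file is preferred instead, a hedged sketch; the path is a placeholder and `from_service_account_file` is the standard GAPIC helper rather than anything specific to this patch:

    from google.cloud import memorystore_v1

    client = memorystore_v1.MemorystoreClient.from_service_account_file(
        "service-account.json"  # placeholder path to a downloaded key file
    )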
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MemorystoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_memorystore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MemorystoreRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_host_no_port(transport_name): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://memorystore.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_host_with_port(transport_name): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'memorystore.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://memorystore.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MemorystoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MemorystoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.get_certificate_authority._session + session2 = client2.transport.get_certificate_authority._session + assert session1 != session2 + +def test_certificate_authority_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + actual = MemorystoreClient.certificate_authority_path(project, location, instance) + assert expected == actual + + +def test_parse_certificate_authority_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = 
MemorystoreClient.certificate_authority_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_certificate_authority_path(path) + assert expected == actual + +def test_forwarding_rule_path(): + project = "cuttlefish" + region = "mussel" + forwarding_rule = "winkle" + expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) + assert expected == actual + + +def test_parse_forwarding_rule_path(): + expected = { + "project": "nautilus", + "region": "scallop", + "forwarding_rule": "abalone", + } + path = MemorystoreClient.forwarding_rule_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_forwarding_rule_path(path) + assert expected == actual + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + actual = MemorystoreClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = MemorystoreClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_instance_path(path) + assert expected == actual + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = MemorystoreClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = MemorystoreClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_network_path(path) + assert expected == actual + +def test_service_attachment_path(): + project = "scallop" + region = "abalone" + service_attachment = "squid" + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + assert expected == actual + + +def test_parse_service_attachment_path(): + expected = { + "project": "clam", + "region": "whelk", + "service_attachment": "octopus", + } + path = MemorystoreClient.service_attachment_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_service_attachment_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MemorystoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MemorystoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
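The `*_path` and `parse_*_path` helpers tested here are plain classmethod string formatters, so they can be used to compose resource names without any credentials. A small sketch, assuming only that the generated package is installed:

    from google.cloud import memorystore_v1

    name = memorystore_v1.MemorystoreClient.instance_path(
        "my-project", "us-central1", "my-instance"
    )
    assert name == "projects/my-project/locations/us-central1/instances/my-instance"
    assert memorystore_v1.MemorystoreClient.parse_instance_path(name) == {
        "project": "my-project",
        "location": "us-central1",
        "instance": "my-instance",
    }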
+ actual = MemorystoreClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = MemorystoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MemorystoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MemorystoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MemorystoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = MemorystoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MemorystoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MemorystoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MemorystoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + transport_class = MemorystoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MemorystoreClient, transports.MemorystoreRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc b/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc new file mode 100644 index 000000000000..90ec0ce4fe89 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/memorystore/__init__.py + google/cloud/memorystore/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 b/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in new file mode 100644 index 000000000000..fa2894ae07c0 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/memorystore *.py +recursive-include google/cloud/memorystore_v1beta *.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst new file mode 100644 index 000000000000..6f935a43af2b --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Memorystore API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Memorystore API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py new file mode 100644 index 000000000000..8d134830ab88 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +# google-cloud-memorystore documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-memorystore" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. 
+# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. 
+# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-memorystore-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-memorystore.tex", + u"google-cloud-memorystore Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-memorystore", + u"Google Cloud Memorystore Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-memorystore", + u"google-cloud-memorystore Documentation", + author, + "google-cloud-memorystore", + "GAPIC library for Google Cloud Memorystore API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. 
+# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst new file mode 100644 index 000000000000..6c5c2af1be0f --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1beta/services_ + memorystore_v1beta/types_ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst new file mode 100644 index 000000000000..38115153a61d --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst @@ -0,0 +1,10 @@ +Memorystore +----------------------------- + +.. automodule:: google.cloud.memorystore_v1beta.services.memorystore + :members: + :inherited-members: + +.. automodule:: google.cloud.memorystore_v1beta.services.memorystore.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst new file mode 100644 index 000000000000..60091568cc47 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Memorystore v1beta API +================================================ +.. toctree:: + :maxdepth: 2 + + memorystore diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst new file mode 100644 index 000000000000..47ad5b81b425 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Memorystore v1beta API +============================================= + +.. 
automodule:: google.cloud.memorystore_v1beta.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py new file mode 100644 index 000000000000..df718a2f7777 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.memorystore_v1beta.services.memorystore.client import MemorystoreClient + +from google.cloud.memorystore_v1beta.types.memorystore import CertificateAuthority +from google.cloud.memorystore_v1beta.types.memorystore import CreateInstanceRequest +from google.cloud.memorystore_v1beta.types.memorystore import DeleteInstanceRequest +from google.cloud.memorystore_v1beta.types.memorystore import DiscoveryEndpoint +from google.cloud.memorystore_v1beta.types.memorystore import GetCertificateAuthorityRequest +from google.cloud.memorystore_v1beta.types.memorystore import GetInstanceRequest +from google.cloud.memorystore_v1beta.types.memorystore import Instance +from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesRequest +from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesResponse +from google.cloud.memorystore_v1beta.types.memorystore import NodeConfig +from google.cloud.memorystore_v1beta.types.memorystore import OperationMetadata +from google.cloud.memorystore_v1beta.types.memorystore import PersistenceConfig +from google.cloud.memorystore_v1beta.types.memorystore import PscAutoConnection +from google.cloud.memorystore_v1beta.types.memorystore import PscConnection +from google.cloud.memorystore_v1beta.types.memorystore import UpdateInstanceRequest +from google.cloud.memorystore_v1beta.types.memorystore import ZoneDistributionConfig +from google.cloud.memorystore_v1beta.types.memorystore import ConnectionType +from google.cloud.memorystore_v1beta.types.memorystore import PscConnectionStatus + +__all__ = ('MemorystoreClient', + 'CertificateAuthority', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'DiscoveryEndpoint', + 'GetCertificateAuthorityRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'NodeConfig', + 'OperationMetadata', + 'PersistenceConfig', + 'PscAutoConnection', + 'PscConnection', + 'UpdateInstanceRequest', + 'ZoneDistributionConfig', + 'ConnectionType', + 'PscConnectionStatus', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed new file mode 100644 index 000000000000..3e10cbb3572e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..72d6431d3df2 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.memorystore_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient + +from .types.memorystore import CertificateAuthority +from .types.memorystore import CreateInstanceRequest +from .types.memorystore import DeleteInstanceRequest +from .types.memorystore import DiscoveryEndpoint +from .types.memorystore import GetCertificateAuthorityRequest +from .types.memorystore import GetInstanceRequest +from .types.memorystore import Instance +from .types.memorystore import ListInstancesRequest +from .types.memorystore import ListInstancesResponse +from .types.memorystore import NodeConfig +from .types.memorystore import OperationMetadata +from .types.memorystore import PersistenceConfig +from .types.memorystore import PscAutoConnection +from .types.memorystore import PscConnection +from .types.memorystore import UpdateInstanceRequest +from .types.memorystore import ZoneDistributionConfig +from .types.memorystore import ConnectionType +from .types.memorystore import PscConnectionStatus + +__all__ = ( +'CertificateAuthority', +'ConnectionType', +'CreateInstanceRequest', +'DeleteInstanceRequest', +'DiscoveryEndpoint', +'GetCertificateAuthorityRequest', +'GetInstanceRequest', +'Instance', +'ListInstancesRequest', +'ListInstancesResponse', +'MemorystoreClient', +'NodeConfig', +'OperationMetadata', +'PersistenceConfig', +'PscAutoConnection', +'PscConnection', +'PscConnectionStatus', +'UpdateInstanceRequest', +'ZoneDistributionConfig', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json new file mode 100644 index 000000000000..0951913c6fff --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json @@ -0,0 +1,48 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.memorystore_v1beta", + "protoPackage": "google.cloud.memorystore.v1beta", + "schema": "1.0", + "services": { + "Memorystore": { + "clients": { + "rest": { + "libraryClient": "MemorystoreClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetCertificateAuthority": { + "methods": [ + "get_certificate_authority" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py new file mode 100644 index 000000000000..558c8aab67c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed new file mode 100644 index 000000000000..3e10cbb3572e --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py new file mode 100644 index 000000000000..e2240b4bffb7 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MemorystoreClient + +__all__ = ( + 'MemorystoreClient', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py new file mode 100644 index 000000000000..f543e438492b --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py @@ -0,0 +1,1636 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.memorystore_v1beta import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1beta.services.memorystore import pagers +from google.cloud.memorystore_v1beta.types import memorystore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO +from .transports.rest import MemorystoreRestTransport + + +class MemorystoreClientMeta(type): + """Metaclass for the Memorystore client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] + _transport_registry["rest"] = MemorystoreRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
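+
+        Example (illustrative sketch only; ``"rest"`` is the only transport
+        registered above, so both calls return the REST transport class)::
+
+            transport_cls = MemorystoreClient.get_transport_class("rest")
+            default_cls = MemorystoreClient.get_transport_class()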
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MemorystoreClient(metaclass=MemorystoreClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "memorystore.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "memorystore.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MemorystoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MemorystoreClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MemorystoreTransport: + """Returns the transport used by the client instance. + + Returns: + MemorystoreTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def certificate_authority_path(project: str,location: str,instance: str,) -> str: + """Returns a fully-qualified certificate_authority string.""" + return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + + @staticmethod + def parse_certificate_authority_path(path: str) -> Dict[str,str]: + """Parses a certificate_authority path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", path) + return m.groupdict() if m else {} + + @staticmethod + def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str: + """Returns a fully-qualified forwarding_rule string.""" + return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + + @staticmethod + def parse_forwarding_rule_path(path: str) -> Dict[str,str]: + """Parses a forwarding_rule path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def instance_path(project: str,location: str,instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str,str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def network_path(project: str,network: str,) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str,str]: + """Parses a network path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_attachment_path(project: str,region: str,service_attachment: str,) -> str: + """Returns a fully-qualified service_attachment string.""" + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + + @staticmethod + def parse_service_attachment_path(path: str) -> Dict[str,str]: + """Parses a service_attachment path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a 
folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = MemorystoreClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MemorystoreClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the memorystore client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MemorystoreTransport,Callable[..., MemorystoreTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MemorystoreTransport constructor. 
+ If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MemorystoreTransport) + if transport_provided: + # transport is a MemorystoreTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MemorystoreTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + MemorystoreClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MemorystoreTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def list_instances(self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists Instances in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_list_instances(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.ListInstancesRequest, dict]): + The request object. Request message for [ListInstances][]. + parent (str): + Required. The parent to list + instances from. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1beta.services.memorystore.pagers.ListInstancesPager: + Response message for [ListInstances][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.ListInstancesRequest): + request = memorystore.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: + r"""Gets details of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_get_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.GetInstanceRequest, dict]): + The request object. Request message for [GetInstance][]. + name (str): + Required. The name of the instance to + retrieve. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1beta.types.Instance: + A Memorystore instance. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.GetInstanceRequest): + request = memorystore.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance(self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new Instance in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_create_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1beta.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1beta.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.CreateInstanceRequest, dict]): + The request object. Request message for [CreateInstance][]. + parent (str): + Required. The parent resource where + this instance will be created. Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.memorystore_v1beta.types.Instance): + Required. The instance to create. 
+ This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The ID to use for the instance, which will + become the final component of the instance's resource + name. + + This value is subject to the following restrictions: + + - Must be 4-63 characters in length + - Must begin with a letter or digit + - Must contain only lowercase letters, digits, and + hyphens + - Must not end with a hyphen + - Must be unique within a location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1beta.types.Instance` + A Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance, instance_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.CreateInstanceRequest): + request = memorystore.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance is not None: + request.instance = instance + if instance_id is not None: + request.instance_id = instance_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance(self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_update_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1beta.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1beta.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.UpdateInstanceRequest, dict]): + The request object. Request message for [UpdateInstance][]. + instance (google.cloud.memorystore_v1beta.types.Instance): + Required. The instance to update. + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be + updated on the instance. At least one + field must be specified. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1beta.types.Instance` + A Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([instance, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.UpdateInstanceRequest): + request = memorystore.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
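+        # Illustrative sketch (the field path and timeout are placeholders,
+        # not part of this patch): the call returns a long-running operation,
+        # and `update_mask` limits which Instance fields are written:
+        #
+        #   from google.protobuf import field_mask_pb2
+        #   operation = client.update_instance(
+        #       instance=instance,
+        #       update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
+        #   )
+        #   updated = operation.result(timeout=600)  # blocks until the LRO completes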
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance(self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_delete_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.DeleteInstanceRequest, dict]): + The request object. Request message for [DeleteInstance][]. + name (str): + Required. The name of the instance to + delete. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.DeleteInstanceRequest): + request = memorystore.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
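+        # Illustrative note (the resource name below is a placeholder): the
+        # delete call also returns a long-running operation, whose result is
+        # google.protobuf.Empty, so callers usually just wait on it:
+        #
+        #   operation = client.delete_instance(
+        #       name="projects/my-project/locations/us-central1/instances/my-instance")
+        #   operation.result()  # returns Empty on success, raises on failure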
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_certificate_authority(self, + request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: + r"""Gets details about the certificate authority for an + Instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1beta + + def sample_get_certificate_authority(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_certificate_authority(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1beta.types.GetCertificateAuthorityRequest, dict]): + The request object. Request message for [GetCertificateAuthority][]. + name (str): + Required. The name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance}/certificateAuthority + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.memorystore_v1beta.types.CertificateAuthority: + A certificate authority for an + instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
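+        # Illustrative only (the resource name is a placeholder): the
+        # certificate authority is typically fetched to configure TLS for
+        # instances with in-transit encryption enabled:
+        #
+        #   ca = client.get_certificate_authority(
+        #       name="projects/my-project/locations/us-central1"
+        #            "/instances/my-instance/certificateAuthority")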
+ if not isinstance(request, memorystore.GetCertificateAuthorityRequest): + request = memorystore.GetCertificateAuthorityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MemorystoreClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MemorystoreClient", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py new file mode 100644 index 000000000000..efff474686e6 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.memorystore_v1beta.types import memorystore + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memorystore_v1beta.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memorystore_v1beta.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
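+
+    A minimal usage sketch (``client`` is a ``MemorystoreClient`` and the
+    parent value is a placeholder):
+
+    .. code-block:: python
+
+        pager = client.list_instances(
+            parent="projects/my-project/locations/us-central1")
+        for instance in pager:
+            print(instance.name)
+
+    Use the ``pages`` property instead to iterate one
+    ``ListInstancesResponse`` at a time.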
+ """ + def __init__(self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memorystore_v1beta.types.ListInstancesRequest): + The initial request object. + response (google.cloud.memorystore_v1beta.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = memorystore.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[memorystore.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[memorystore.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst new file mode 100644 index 000000000000..1ed25b9404df --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MemorystoreTransport` is the ABC for all transports. +- public child `MemorystoreGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MemorystoreGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMemorystoreRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MemorystoreRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py new file mode 100644 index 000000000000..6172c94a25d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MemorystoreTransport +from .rest import MemorystoreRestTransport +from .rest import MemorystoreRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] +_transport_registry['rest'] = MemorystoreRestTransport + +__all__ = ( + 'MemorystoreTransport', + 'MemorystoreRestTransport', + 'MemorystoreRestInterceptor', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py new file mode 100644 index 000000000000..5b8147a83f2f --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.memorystore_v1beta import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1beta.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MemorystoreTransport(abc.ABC): + """Abstract transport class for Memorystore.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'memorystore.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'memorystore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
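+        # Each RPC below is bound to its default retry/timeout policy. For
+        # example, list_instances retries only ServiceUnavailable errors with
+        # exponential backoff (1.0s initial delay, x1.3 multiplier, capped at
+        # 10.0s per attempt) until the 60.0s deadline; the mutating calls get
+        # a plain 600.0s timeout with no retry. Per-call `retry=`/`timeout=`
+        # arguments on the client still override these defaults, e.g.
+        # (illustrative values only):
+        #
+        #   from google.api_core import retry as retries
+        #   client.get_instance(name=..., timeout=30.0,
+        #                       retry=retries.Retry(deadline=30.0))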
+ self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_certificate_authority: gapic_v1.method.wrap_method( + self.get_certificate_authority, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
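+
+        The synchronous client exposes this through the context-manager
+        protocol as well: exiting a ``with MemorystoreClient() as client:``
+        block calls ``close()`` on the underlying transport.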
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [memorystore.GetInstanceRequest], + Union[ + memorystore.Instance, + Awaitable[memorystore.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_certificate_authority(self) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'MemorystoreTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py new file mode 100644 index 000000000000..74bfb95d397d --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py @@ -0,0 +1,1406 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.memorystore_v1beta.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseMemorystoreRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class MemorystoreRestInterceptor: + """Interceptor for Memorystore. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MemorystoreRestTransport. + + .. 
code-block:: python + class MyCustomMemorystoreInterceptor(MemorystoreRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_certificate_authority(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_certificate_authority(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MemorystoreRestTransport(interceptor=MyCustomMemorystoreInterceptor()) + client = MemorystoreClient(transport=transport) + + + """ + def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_certificate_authority + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. 
+ """ + return request, metadata + + def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + """Post-rpc interceptor for get_certificate_authority + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_instance(self, response: memorystore.Instance) -> memorystore.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MemorystoreRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MemorystoreRestInterceptor + + +class MemorystoreRestTransport(_BaseMemorystoreRestTransport): + """REST backend synchronous transport for Memorystore. 
+ + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'memorystore.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'memorystore.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MemorystoreRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
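+        # The mappings below route the long-running Operations mixin methods
+        # (cancel/delete/get/list) onto this API's v1beta REST paths; the
+        # resulting operations client is built once here and reused by
+        # subsequent calls on this transport instance.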
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: memorystore.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.memorystore.CreateInstanceRequest): + The request object. Request message for [CreateInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
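+            # (for example, an HTTP 404 is surfaced as google.api_core.exceptions.NotFound)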
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.memorystore.DeleteInstanceRequest): + The request object. Request message for [DeleteInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetCertificateAuthority") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.GetCertificateAuthorityRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.CertificateAuthority: + r"""Call the get certificate authority method over HTTP. + + Args: + request (~.memorystore.GetCertificateAuthorityRequest): + The request object. Request message for [GetCertificateAuthority][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.CertificateAuthority: + A certificate authority for an + instance. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.CertificateAuthority() + pb_resp = memorystore.CertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_certificate_authority(resp) + return resp + + class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.memorystore.GetInstanceRequest): + The request object. Request message for [GetInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.Instance: + A Memorystore instance. + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.Instance() + pb_resp = memorystore.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: memorystore.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> memorystore.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.memorystore.ListInstancesRequest): + The request object. Request message for [ListInstances][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.memorystore.ListInstancesResponse: + Response message for [ListInstances][]. + """ + + http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.ListInstancesResponse() + pb_resp = memorystore.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: memorystore.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.memorystore.UpdateInstanceRequest): + The request object. Request message for [UpdateInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + @property + def create_instance(self) -> Callable[ + [memorystore.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [memorystore.DeleteInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_certificate_authority(self) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + memorystore.CertificateAuthority]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [memorystore.GetInstanceRequest], + memorystore.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [memorystore.ListInstancesRequest], + memorystore.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [memorystore.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
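+            # On success the cancel RPC carries no response body, so the
+            # post_cancel_operation interceptor hook simply receives None.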
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + # Send the request + response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MemorystoreRestTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py new file mode 100644 index 000000000000..7374ad3d00d2 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.memorystore_v1beta.types import memorystore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseMemorystoreRestTransport(MemorystoreTransport): + """Base REST backend transport for Memorystore. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'memorystore.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'memorystore.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateInstance:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "instanceId" : "", }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1beta/{parent=projects/*/locations/*}/instances',
+                'body': 'instance',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = memorystore.CreateInstanceRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteInstance:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'delete',
+                'uri': '/v1beta/{name=projects/*/locations/*/instances/*}',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = memorystore.DeleteInstanceRequest.pb(request)
+            transcoded_request =
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetCertificateAuthority: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetCertificateAuthorityRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{parent=projects/*/locations/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
memorystore.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1beta/{instance.name=projects/*/locations/*/instances/*}', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def 
_get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1beta/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseMemorystoreRestTransport', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py new file mode 100644 index 000000000000..fc13543f2db2 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .memorystore import ( + CertificateAuthority, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + UpdateInstanceRequest, + ZoneDistributionConfig, + ConnectionType, + PscConnectionStatus, +) + +__all__ = ( + 'CertificateAuthority', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'DiscoveryEndpoint', + 'GetCertificateAuthorityRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'NodeConfig', + 'OperationMetadata', + 'PersistenceConfig', + 'PscAutoConnection', + 'PscConnection', + 'UpdateInstanceRequest', + 'ZoneDistributionConfig', + 'ConnectionType', + 'PscConnectionStatus', +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py new file mode 100644 index 000000000000..dd56d5678ceb --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py @@ -0,0 +1,1225 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.memorystore.v1beta', + manifest={ + 'PscConnectionStatus', + 'ConnectionType', + 'Instance', + 'PscAutoConnection', + 'PscConnection', + 'DiscoveryEndpoint', + 'PersistenceConfig', + 'NodeConfig', + 'ZoneDistributionConfig', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'UpdateInstanceRequest', + 'DeleteInstanceRequest', + 'GetCertificateAuthorityRequest', + 'CertificateAuthority', + 'OperationMetadata', + }, +) + + +class PscConnectionStatus(proto.Enum): + r"""Status of the PSC connection. + + Values: + PSC_CONNECTION_STATUS_UNSPECIFIED (0): + PSC connection status is not specified. 
+ ACTIVE (1): + The connection is active + NOT_FOUND (2): + Connection not found + """ + PSC_CONNECTION_STATUS_UNSPECIFIED = 0 + ACTIVE = 1 + NOT_FOUND = 2 + + +class ConnectionType(proto.Enum): + r"""Type of a PSC connection + + Values: + CONNECTION_TYPE_UNSPECIFIED (0): + Connection Type is not set + CONNECTION_TYPE_DISCOVERY (1): + Connection that will be used for topology + discovery. + CONNECTION_TYPE_PRIMARY (2): + Connection that will be used as primary + endpoint to access primary. + CONNECTION_TYPE_READER (3): + Connection that will be used as reader + endpoint to access replicas. + """ + CONNECTION_TYPE_UNSPECIFIED = 0 + CONNECTION_TYPE_DISCOVERY = 1 + CONNECTION_TYPE_PRIMARY = 2 + CONNECTION_TYPE_READER = 3 + + +class Instance(proto.Message): + r"""A Memorystore instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. Unique name of the instance. + Format: + projects/{project}/locations/{location}/instances/{instance} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation timestamp of the + instance. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Latest update timestamp of the + instance. + labels (MutableMapping[str, str]): + Optional. Labels to represent user-provided + metadata. + state (google.cloud.memorystore_v1beta.types.Instance.State): + Output only. Current state of the instance. + state_info (google.cloud.memorystore_v1beta.types.Instance.StateInfo): + Output only. Additional information about the + state of the instance. + uid (str): + Output only. System assigned, unique + identifier for the instance. + replica_count (int): + Optional. Number of replica nodes per shard. + If omitted the default is 0 replicas. + + This field is a member of `oneof`_ ``_replica_count``. + authorization_mode (google.cloud.memorystore_v1beta.types.Instance.AuthorizationMode): + Optional. Immutable. Authorization mode of + the instance. + transit_encryption_mode (google.cloud.memorystore_v1beta.types.Instance.TransitEncryptionMode): + Optional. Immutable. In-transit encryption + mode of the instance. + shard_count (int): + Optional. Number of shards for the instance. + discovery_endpoints (MutableSequence[google.cloud.memorystore_v1beta.types.DiscoveryEndpoint]): + Output only. Endpoints clients can connect to + the instance through. Currently only one + discovery endpoint is supported. + node_type (google.cloud.memorystore_v1beta.types.Instance.NodeType): + Optional. Immutable. Machine type for + individual nodes of the instance. + persistence_config (google.cloud.memorystore_v1beta.types.PersistenceConfig): + Optional. Persistence configuration of the + instance. + engine_version (str): + Optional. Immutable. Engine version of the + instance. + engine_configs (MutableMapping[str, str]): + Optional. User-provided engine configurations + for the instance. + node_config (google.cloud.memorystore_v1beta.types.NodeConfig): + Output only. Configuration of individual + nodes of the instance. + zone_distribution_config (google.cloud.memorystore_v1beta.types.ZoneDistributionConfig): + Optional. Immutable. Zone distribution + configuration of the instance for node + allocation. + deletion_protection_enabled (bool): + Optional. If set to true deletion of the + instance will fail. + + This field is a member of `oneof`_ ``_deletion_protection_enabled``. 
+ psc_auto_connections (MutableSequence[google.cloud.memorystore_v1beta.types.PscAutoConnection]): + Required. Immutable. User inputs and resource + details of the auto-created PSC connections. + endpoints (MutableSequence[google.cloud.memorystore_v1beta.types.Instance.InstanceEndpoint]): + Optional. Endpoints for the instance. + mode (google.cloud.memorystore_v1beta.types.Instance.Mode): + Optional. The mode config for the instance. + """ + class State(proto.Enum): + r"""Possible states of the instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Instance is being created. + ACTIVE (2): + Instance has been created and is usable. + UPDATING (3): + Instance is being updated. + DELETING (4): + Instance is being deleted. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + UPDATING = 3 + DELETING = 4 + + class AuthorizationMode(proto.Enum): + r"""Possible authorization modes of the instance. + + Values: + AUTHORIZATION_MODE_UNSPECIFIED (0): + Not set. + AUTH_DISABLED (1): + Authorization disabled. + IAM_AUTH (2): + IAM basic authorization. + """ + AUTHORIZATION_MODE_UNSPECIFIED = 0 + AUTH_DISABLED = 1 + IAM_AUTH = 2 + + class TransitEncryptionMode(proto.Enum): + r"""Possible in-transit encryption modes of the instance. + + Values: + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED (0): + Not set. + TRANSIT_ENCRYPTION_DISABLED (1): + In-transit encryption is disabled. + SERVER_AUTHENTICATION (2): + Server-managed encryption is used for + in-transit encryption. + """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 + TRANSIT_ENCRYPTION_DISABLED = 1 + SERVER_AUTHENTICATION = 2 + + class NodeType(proto.Enum): + r"""Possible node types of the instance. See + https://cloud.google.com/memorystore/docs/valkey/instance-node-specification + for more information. + + Values: + NODE_TYPE_UNSPECIFIED (0): + Not set. + SHARED_CORE_NANO (1): + Shared core nano. + HIGHMEM_MEDIUM (2): + High memory medium. + HIGHMEM_XLARGE (3): + High memory extra large. + STANDARD_SMALL (4): + Standard small. + """ + NODE_TYPE_UNSPECIFIED = 0 + SHARED_CORE_NANO = 1 + HIGHMEM_MEDIUM = 2 + HIGHMEM_XLARGE = 3 + STANDARD_SMALL = 4 + + class Mode(proto.Enum): + r"""The mode config, which is used to enable/disable cluster + mode. + + Values: + MODE_UNSPECIFIED (0): + Mode is not specified. + STANDALONE (1): + Instance is in standalone mode. + CLUSTER (2): + Instance is in cluster mode. + """ + MODE_UNSPECIFIED = 0 + STANDALONE = 1 + CLUSTER = 2 + + class StateInfo(proto.Message): + r"""Additional information about the state of the instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + update_info (google.cloud.memorystore_v1beta.types.Instance.StateInfo.UpdateInfo): + Output only. Describes ongoing update when + instance state is UPDATING. + + This field is a member of `oneof`_ ``info``. + """ + + class UpdateInfo(proto.Message): + r"""Represents information about instance with state UPDATING. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_shard_count (int): + Output only. Target number of shards for the + instance. + + This field is a member of `oneof`_ ``_target_shard_count``. + target_replica_count (int): + Output only. Target number of replica nodes + per shard for the instance. + + This field is a member of `oneof`_ ``_target_replica_count``. 
+ """ + + target_shard_count: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + target_replica_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + proto.MESSAGE, + number=1, + oneof='info', + message='Instance.StateInfo.UpdateInfo', + ) + + class InstanceEndpoint(proto.Message): + r"""InstanceEndpoint consists of PSC connections that are created + as a group in each VPC network for accessing the instance. In + each group, there shall be one connection for each service + attachment in the cluster. + + Attributes: + connections (MutableSequence[google.cloud.memorystore_v1beta.types.Instance.ConnectionDetail]): + Optional. A group of PSC connections. They + are created in the same VPC network, one for + each service attachment in the cluster. + """ + + connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance.ConnectionDetail', + ) + + class ConnectionDetail(proto.Message): + r"""Information of each PSC connection. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + psc_auto_connection (google.cloud.memorystore_v1beta.types.PscAutoConnection): + Detailed information of a PSC connection that + is created through service connectivity + automation. + + This field is a member of `oneof`_ ``connection``. + psc_connection (google.cloud.memorystore_v1beta.types.PscConnection): + Detailed information of a PSC connection that + is created by the user. + + This field is a member of `oneof`_ ``connection``. 
+ """ + + psc_auto_connection: 'PscAutoConnection' = proto.Field( + proto.MESSAGE, + number=1, + oneof='connection', + message='PscAutoConnection', + ) + psc_connection: 'PscConnection' = proto.Field( + proto.MESSAGE, + number=2, + oneof='connection', + message='PscConnection', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + state_info: StateInfo = proto.Field( + proto.MESSAGE, + number=6, + message=StateInfo, + ) + uid: str = proto.Field( + proto.STRING, + number=7, + ) + replica_count: int = proto.Field( + proto.INT32, + number=8, + optional=True, + ) + authorization_mode: AuthorizationMode = proto.Field( + proto.ENUM, + number=9, + enum=AuthorizationMode, + ) + transit_encryption_mode: TransitEncryptionMode = proto.Field( + proto.ENUM, + number=10, + enum=TransitEncryptionMode, + ) + shard_count: int = proto.Field( + proto.INT32, + number=11, + ) + discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='DiscoveryEndpoint', + ) + node_type: NodeType = proto.Field( + proto.ENUM, + number=13, + enum=NodeType, + ) + persistence_config: 'PersistenceConfig' = proto.Field( + proto.MESSAGE, + number=14, + message='PersistenceConfig', + ) + engine_version: str = proto.Field( + proto.STRING, + number=15, + ) + engine_configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + node_config: 'NodeConfig' = proto.Field( + proto.MESSAGE, + number=17, + message='NodeConfig', + ) + zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + proto.MESSAGE, + number=18, + message='ZoneDistributionConfig', + ) + deletion_protection_enabled: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message='PscAutoConnection', + ) + endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=InstanceEndpoint, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=26, + enum=Mode, + ) + + +class PscAutoConnection(proto.Message): + r"""Details of consumer resources in a PSC connection. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + port (int): + Optional. Output only. port will only be set + for Primary/Reader or Discovery endpoint. + + This field is a member of `oneof`_ ``ports``. + psc_connection_id (str): + Output only. The PSC connection id of the + forwarding rule connected to the service + attachment. + ip_address (str): + Output only. The IP allocated on the consumer + network for the PSC forwarding rule. + forwarding_rule (str): + Output only. The URI of the consumer side forwarding rule. + Format: + projects/{project}/regions/{region}/forwardingRules/{forwarding_rule} + project_id (str): + Required. The consumer project_id where PSC connections are + established. This should be the same project_id that the + instance is being created in. + network (str): + Required. 
The network where the PSC endpoints are created, + in the form of + projects/{project_id}/global/networks/{network_id}. + service_attachment (str): + Output only. The service attachment which is + the target of the PSC connection, in the form of + projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}. + psc_connection_status (google.cloud.memorystore_v1beta.types.PscConnectionStatus): + Output only. The status of the PSC + connection: whether a connection exists and + ACTIVE or it no longer exists. Please note that + this value is updated periodically. Please use + Private Service Connect APIs for the latest + status. + connection_type (google.cloud.memorystore_v1beta.types.ConnectionType): + Output only. Type of the PSC connection. + """ + + port: int = proto.Field( + proto.INT32, + number=9, + oneof='ports', + ) + psc_connection_id: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=6, + ) + psc_connection_status: 'PscConnectionStatus' = proto.Field( + proto.ENUM, + number=7, + enum='PscConnectionStatus', + ) + connection_type: 'ConnectionType' = proto.Field( + proto.ENUM, + number=8, + enum='ConnectionType', + ) + + +class PscConnection(proto.Message): + r"""User created Psc connection configuration. + + Attributes: + psc_connection_id (str): + Output only. The PSC connection id of the + forwarding rule connected to the service + attachment. + ip_address (str): + Required. The IP allocated on the consumer + network for the PSC forwarding rule. + forwarding_rule (str): + Required. The URI of the consumer side forwarding rule. + Format: + projects/{project}/regions/{region}/forwardingRules/{forwarding_rule} + project_id (str): + Output only. The consumer project_id where the forwarding + rule is created from. + network (str): + Required. The consumer network where the IP address resides, + in the form of + projects/{project_id}/global/networks/{network_id}. + service_attachment (str): + Required. The service attachment which is the + target of the PSC connection, in the form of + projects/{project-id}/regions/{region}/serviceAttachments/{service-attachment-id}. + psc_connection_status (google.cloud.memorystore_v1beta.types.PscConnectionStatus): + Output only. The status of the PSC + connection: whether a connection exists and + ACTIVE or it no longer exists. Please note that + this value is updated periodically. Please use + Private Service Connect APIs for the latest + status. + connection_type (google.cloud.memorystore_v1beta.types.ConnectionType): + Output only. Type of the PSC connection. 
+ """ + + psc_connection_id: str = proto.Field( + proto.STRING, + number=1, + ) + ip_address: str = proto.Field( + proto.STRING, + number=2, + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + network: str = proto.Field( + proto.STRING, + number=5, + ) + service_attachment: str = proto.Field( + proto.STRING, + number=6, + ) + psc_connection_status: 'PscConnectionStatus' = proto.Field( + proto.ENUM, + number=7, + enum='PscConnectionStatus', + ) + connection_type: 'ConnectionType' = proto.Field( + proto.ENUM, + number=8, + enum='ConnectionType', + ) + + +class DiscoveryEndpoint(proto.Message): + r"""Represents an endpoint for clients to connect to the + instance. + + Attributes: + address (str): + Output only. IP address of the exposed + endpoint clients connect to. + port (int): + Output only. The port number of the exposed + endpoint. + network (str): + Output only. The network where the IP address of the + discovery endpoint will be reserved, in the form of + projects/{network_project}/global/networks/{network_id}. + """ + + address: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + network: str = proto.Field( + proto.STRING, + number=4, + ) + + +class PersistenceConfig(proto.Message): + r"""Represents persistence configuration for a instance. + + Attributes: + mode (google.cloud.memorystore_v1beta.types.PersistenceConfig.PersistenceMode): + Optional. Current persistence mode. + rdb_config (google.cloud.memorystore_v1beta.types.PersistenceConfig.RDBConfig): + Optional. RDB configuration. This field will + be ignored if mode is not RDB. + aof_config (google.cloud.memorystore_v1beta.types.PersistenceConfig.AOFConfig): + Optional. AOF configuration. This field will + be ignored if mode is not AOF. + """ + class PersistenceMode(proto.Enum): + r"""Possible persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled, and any snapshot + data is deleted. + RDB (2): + RDB based persistence is enabled. + AOF (3): + AOF based persistence is enabled. + """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + AOF = 3 + + class RDBConfig(proto.Message): + r"""Configuration for RDB based persistence. + + Attributes: + rdb_snapshot_period (google.cloud.memorystore_v1beta.types.PersistenceConfig.RDBConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Time that the first snapshot + was/will be attempted, and to which future + snapshots will be aligned. If not provided, the + current time will be used. + """ + class SnapshotPeriod(proto.Enum): + r"""Possible snapshot periods. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (1): + One hour. + SIX_HOURS (2): + Six hours. + TWELVE_HOURS (3): + Twelve hours. + TWENTY_FOUR_HOURS (4): + Twenty four hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 1 + SIX_HOURS = 2 + TWELVE_HOURS = 3 + TWENTY_FOUR_HOURS = 4 + + rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + proto.ENUM, + number=1, + enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class AOFConfig(proto.Message): + r"""Configuration for AOF based persistence. 
+ + Attributes: + append_fsync (google.cloud.memorystore_v1beta.types.PersistenceConfig.AOFConfig.AppendFsync): + Optional. The fsync mode. + """ + class AppendFsync(proto.Enum): + r"""Possible fsync modes. + + Values: + APPEND_FSYNC_UNSPECIFIED (0): + Not set. Default: EVERY_SEC + NEVER (1): + Never fsync. Normally Linux will flush data + every 30 seconds with this configuration, but + it's up to the kernel's exact tuning. + EVERY_SEC (2): + Fsync every second. You may lose 1 second of + data if there is a disaster. + ALWAYS (3): + Fsync every time new write commands are + appended to the AOF. The best data loss + protection at the cost of performance. + """ + APPEND_FSYNC_UNSPECIFIED = 0 + NEVER = 1 + EVERY_SEC = 2 + ALWAYS = 3 + + append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + proto.ENUM, + number=1, + enum='PersistenceConfig.AOFConfig.AppendFsync', + ) + + mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_config: RDBConfig = proto.Field( + proto.MESSAGE, + number=2, + message=RDBConfig, + ) + aof_config: AOFConfig = proto.Field( + proto.MESSAGE, + number=3, + message=AOFConfig, + ) + + +class NodeConfig(proto.Message): + r"""Represents configuration for nodes of the instance. + + Attributes: + size_gb (float): + Output only. Memory size in GB of the node. + """ + + size_gb: float = proto.Field( + proto.DOUBLE, + number=1, + ) + + +class ZoneDistributionConfig(proto.Message): + r"""Zone distribution configuration for allocation of instance + resources. + + Attributes: + zone (str): + Optional. Defines zone where all resources will be allocated + with SINGLE_ZONE mode. Ignored for MULTI_ZONE mode. + mode (google.cloud.memorystore_v1beta.types.ZoneDistributionConfig.ZoneDistributionMode): + Optional. Current zone distribution mode. Defaults to + MULTI_ZONE. + """ + class ZoneDistributionMode(proto.Enum): + r"""Possible zone distribution modes. + + Values: + ZONE_DISTRIBUTION_MODE_UNSPECIFIED (0): + Not Set. Default: MULTI_ZONE + MULTI_ZONE (1): + Distribute resources across 3 zones picked at + random within the region. + SINGLE_ZONE (2): + Provision resources in a single zone. Zone + field must be specified. + """ + ZONE_DISTRIBUTION_MODE_UNSPECIFIED = 0 + MULTI_ZONE = 1 + SINGLE_ZONE = 2 + + zone: str = proto.Field( + proto.STRING, + number=2, + ) + mode: ZoneDistributionMode = proto.Field( + proto.ENUM, + number=1, + enum=ZoneDistributionMode, + ) + + +class ListInstancesRequest(proto.Message): + r"""Request message for [ListInstances][]. + + Attributes: + parent (str): + Required. The parent to list instances from. + Format: projects/{project}/locations/{location} + page_size (int): + Optional. Requested page size. Server may + return fewer items than requested. If + unspecified, server will pick an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results the server should return. + filter (str): + Optional. Expression for filtering results. + order_by (str): + Optional. Sort results by a defined order. Supported values: + "name", "create_time". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response message for [ListInstances][]. 
+ + Attributes: + instances (MutableSequence[google.cloud.memorystore_v1beta.types.Instance]): + If the {location} requested was "-" the + response contains a list of instances from all + locations. Instances in unreachable locations + will be omitted. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Request message for [GetInstance][]. + + Attributes: + name (str): + Required. The name of the instance to + retrieve. Format: + projects/{project}/locations/{location}/instances/{instance} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Request message for [CreateInstance][]. + + Attributes: + parent (str): + Required. The parent resource where this + instance will be created. Format: + projects/{project}/locations/{location} + instance_id (str): + Required. The ID to use for the instance, which will become + the final component of the instance's resource name. + + This value is subject to the following restrictions: + + - Must be 4-63 characters in length + - Must begin with a letter or digit + - Must contain only lowercase letters, digits, and hyphens + - Must not end with a hyphen + - Must be unique within a location + instance (google.cloud.memorystore_v1beta.types.Instance): + Required. The instance to create. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=3, + message='Instance', + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Request message for [UpdateInstance][]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to be updated on + the instance. At least one field must be + specified. + instance (google.cloud.memorystore_v1beta.types.Instance): + Required. The instance to update. + request_id (str): + Optional. An optional request ID to identify + requests. 
Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=2, + message='Instance', + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Request message for [DeleteInstance][]. + + Attributes: + name (str): + Required. The name of the instance to delete. + Format: + projects/{project}/locations/{location}/instances/{instance} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCertificateAuthorityRequest(proto.Message): + r"""Request message for [GetCertificateAuthority][]. + + Attributes: + name (str): + Required. The name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance}/certificateAuthority + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CertificateAuthority(proto.Message): + r"""A certificate authority for an instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_server_ca (google.cloud.memorystore_v1beta.types.CertificateAuthority.ManagedCertificateAuthority): + A managed server certificate authority. + + This field is a member of `oneof`_ ``server_ca``. + name (str): + Identifier. Unique name of the certificate + authority. Format: + + projects/{project}/locations/{location}/instances/{instance} + """ + + class ManagedCertificateAuthority(proto.Message): + r"""A managed certificate authority. + + Attributes: + ca_certs (MutableSequence[google.cloud.memorystore_v1beta.types.CertificateAuthority.ManagedCertificateAuthority.CertChain]): + PEM encoded CA certificate chains for managed + server authentication. + """ + + class CertChain(proto.Message): + r"""A certificate chain. 
+ + Attributes: + certificates (MutableSequence[str]): + The certificates that form the CA chain in + order of leaf to root. + """ + + certificates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + ) + + managed_server_ca: ManagedCertificateAuthority = proto.Field( + proto.MESSAGE, + number=2, + oneof='server_ca', + message=ManagedCertificateAuthority, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have been + cancelled successfully have [Operation.error][] value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini b/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py new file mode 100644 index 000000000000..9d2f5c2af33c --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
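As a quick illustration of the types defined in memorystore.py above, the sketch below builds a CreateInstanceRequest that also fills in the optional persistence and zone-distribution settings, which the generated create_instance sample further down leaves untouched. The client, field, and enum names are taken from the generated code in this change; the project, network, zone, and instance identifiers are placeholder values only, and real usage may need regional endpoints and credentials as noted in the generated snippets.

from google.cloud import memorystore_v1beta


def sample_create_instance_with_persistence():
    # Placeholder identifiers; substitute real project, network, and zone values.
    parent = "projects/my-project/locations/us-central1"
    network = "projects/my-project/global/networks/my-network"

    client = memorystore_v1beta.MemorystoreClient()

    instance = memorystore_v1beta.Instance(
        # Required, immutable PSC connection details (see PscAutoConnection above).
        psc_auto_connections=[
            memorystore_v1beta.PscAutoConnection(
                project_id="my-project",
                network=network,
            )
        ],
        # Optional RDB persistence, snapshotting once a day.
        persistence_config=memorystore_v1beta.PersistenceConfig(
            mode=memorystore_v1beta.PersistenceConfig.PersistenceMode.RDB,
            rdb_config=memorystore_v1beta.PersistenceConfig.RDBConfig(
                rdb_snapshot_period=memorystore_v1beta.PersistenceConfig.RDBConfig.SnapshotPeriod.TWENTY_FOUR_HOURS,
            ),
        ),
        # Optional single-zone placement; the zone field must be set for SINGLE_ZONE.
        zone_distribution_config=memorystore_v1beta.ZoneDistributionConfig(
            mode=memorystore_v1beta.ZoneDistributionConfig.ZoneDistributionMode.SINGLE_ZONE,
            zone="us-central1-a",
        ),
    )

    request = memorystore_v1beta.CreateInstanceRequest(
        parent=parent,
        instance_id="my-instance",
        instance=instance,
    )

    # create_instance returns a long-running operation; result() blocks until it completes.
    operation = client.create_instance(request=request)
    print(operation.result())

Like the generated sample, this relies on the versioned package re-exporting the request and resource types at the top level; the placement and persistence values shown are illustrative rather than recommended defaults.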
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-memorystore' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/memorystore_v1beta/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/memorystore_v1beta/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py new file mode 100644 index 000000000000..20339b56abd7 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_create_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1beta.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1beta.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1beta_generated_Memorystore_CreateInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py new file mode 100644 index 000000000000..260bb36f112a --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_delete_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1beta_generated_Memorystore_DeleteInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py new file mode 100644 index 000000000000..a6b9535aa668 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCertificateAuthority +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_GetCertificateAuthority_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_get_certificate_authority(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetCertificateAuthorityRequest( + name="name_value", + ) + + # Make the request + response = client.get_certificate_authority(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1beta_generated_Memorystore_GetCertificateAuthority_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py new file mode 100644 index 000000000000..19f923dede64 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_get_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1beta_generated_Memorystore_GetInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py new file mode 100644 index 000000000000..f310bb93e602 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_list_instances(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1beta.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memorystore_v1beta_generated_Memorystore_ListInstances_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py new file mode 100644 index 000000000000..ea5f6ea6077d --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1beta_generated_Memorystore_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1beta + + +def sample_update_instance(): + # Create a client + client = memorystore_v1beta.MemorystoreClient() + + # Initialize request argument(s) + instance = memorystore_v1beta.Instance() + instance.psc_auto_connections.port = 453 + instance.psc_auto_connections.project_id = "project_id_value" + instance.psc_auto_connections.network = "network_value" + + request = memorystore_v1beta.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1beta_generated_Memorystore_UpdateInstance_sync] diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json new file mode 100644 index 000000000000..40744f187587 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json @@ -0,0 +1,507 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.memorystore.v1beta", + "version": "v1beta" + } + ], + "language": "PYTHON", + "name": "google-cloud-memorystore", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.create_instance", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.CreateInstance", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.memorystore_v1beta.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "memorystore_v1beta_generated_memorystore_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_CreateInstance_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.delete_instance", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.DeleteInstance", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "memorystore_v1beta_generated_memorystore_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.get_certificate_authority", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.GetCertificateAuthority", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetCertificateAuthority" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.GetCertificateAuthorityRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1beta.types.CertificateAuthority", + "shortName": "get_certificate_authority" + }, + "description": "Sample for GetCertificateAuthority", + "file": "memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_GetCertificateAuthority_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + 
"fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.get_instance", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.GetInstance", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1beta.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "memorystore_v1beta_generated_memorystore_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.list_instances", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.ListInstances", + "service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.memorystore_v1beta.services.memorystore.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "memorystore_v1beta_generated_memorystore_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1beta.MemorystoreClient.update_instance", + "method": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore.UpdateInstance", + 
"service": { + "fullName": "google.cloud.memorystore.v1beta.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1beta.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.memorystore_v1beta.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "memorystore_v1beta_generated_memorystore_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1beta_generated_Memorystore_UpdateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1beta_generated_memorystore_update_instance_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py b/owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py new file mode 100644 index 000000000000..f79de1f612d8 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py @@ -0,0 +1,181 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class memorystoreCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), + 'delete_instance': ('name', 'request_id', ), + 'get_certificate_authority': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_instance': ('instance', 'update_mask', 'request_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=memorystoreCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the memorystore client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py b/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py new file mode 100644 index 000000000000..3cb03e5a17ca --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
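To make the transform above concrete: for each method listed in METHOD_TO_PARAMS, the script folds positional (flattened) arguments into a single request dict and keeps the control parameters (retry, timeout, metadata) as keyword arguments. A hedged sketch of the source-level rewrite for get_instance follows; `client` stands for an already-constructed MemorystoreClient and the resource name is illustrative, not taken from the generated samples.

    # Input source (old, flattened positional style):
    client.get_instance(
        "projects/my-project/locations/us-central1/instances/my-instance",
        timeout=30,
    )

    # Output written to --output-directory (positional arguments folded into a
    # single request dict; control parameters retry/timeout/metadata are kept
    # as keyword arguments):
    client.get_instance(
        request={"name": "projects/my-project/locations/us-central1/instances/my-instance"},
        timeout=30,
    )

The script itself is driven from the command line, e.g. `python fixup_memorystore_v1beta_keywords.py -d <input_dir> -o <output_dir>`; it copies the rewritten files into the (empty) output directory rather than editing the sources in place.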
+# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-memorystore' + + +description = "Google Cloud Memorystore API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt new file mode 100644 index 000000000000..fc812592b0ee --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..7b3de3117f38 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py new file mode 100644 index 000000000000..25ba80cdffb7 --- /dev/null +++ b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py @@ -0,0 +1,3401 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.memorystore_v1beta.services.memorystore import MemorystoreClient +from google.cloud.memorystore_v1beta.services.memorystore import pagers +from google.cloud.memorystore_v1beta.services.memorystore import transports +from google.cloud.memorystore_v1beta.types import memorystore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. 
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MemorystoreClient._get_default_mtls_endpoint(None) is None + assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MemorystoreClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + MemorystoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MemorystoreClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MemorystoreClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MemorystoreClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MemorystoreClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert 
MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MemorystoreClient._get_client_cert_source(None, False) is None + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source + assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MemorystoreClient._DEFAULT_UNIVERSE + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + MemorystoreClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
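The helper tests above pin down how the client resolves its universe domain and endpoint: an explicit setting on the client wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which wins over the googleapis.com default; empty values are rejected; and GOOGLE_API_USE_CLIENT_CERTIFICATE / GOOGLE_API_USE_MTLS_ENDPOINT accept only "true"/"false" and "never"/"auto"/"always" respectively. A small sketch of the public side of that behavior, assuming a google-api-core version whose ClientOptions supports universe_domain, with an illustrative universe domain and anonymous credentials so no real calls are made:

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud.memorystore_v1beta import MemorystoreClient

    # ClientOptions.universe_domain takes precedence over the
    # GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn takes
    # precedence over the googleapis.com default.
    options = client_options.ClientOptions(universe_domain="example-universe.com")
    client = MemorystoreClient(
        client_options=options,
        credentials=ga_credentials.AnonymousCredentials(),
    )
    print(client.universe_domain)  # expected: example-universe.com
    print(client.api_endpoint)     # expected: memorystore.example-universe.com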
+ + +@pytest.mark.parametrize("client_class,transport_name", [ + (MemorystoreClient, "rest"), +]) +def test_memorystore_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://memorystore.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MemorystoreRestTransport, "rest"), +]) +def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (MemorystoreClient, "rest"), +]) +def test_memorystore_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://memorystore.googleapis.com' + ) + + +def test_memorystore_client_get_transport_class(): + transport = MemorystoreClient.get_transport_class() + available_transports = [ + transports.MemorystoreRestTransport, + ] + assert transport in available_transports + + transport = MemorystoreClient.get_transport_class("rest") + assert transport == transports.MemorystoreRestTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test_memorystore_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MemorystoreClient +]) +@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + MemorystoreClient +]) +@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +def test_memorystore_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MemorystoreClient._DEFAULT_UNIVERSE + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), +]) +def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), +]) +def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instances_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + memorystore.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + memorystore.Instance(), + memorystore.Instance(), + ], + next_page_token='abc', + ), + memorystore.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + ], + next_page_token='ghi', + ), + memorystore.ListInstancesResponse( + instances=[ + memorystore.Instance(), + memorystore.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, memorystore.Instance) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
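+ # (The "gRPC stub" wording above is generic; this test exercises the REST
+ # transport's cached wrapped method.)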
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
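+ # An empty Instance is sufficient here: the flattened-args test only checks
+ # that the request was routed to the expected URI, not the response payload.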
+ return_value = memorystore.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + memorystore.GetInstanceRequest(), + name='name_value', + ) + + +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("instance_id", "request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
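+ # Unlike the GET-style methods above, CreateInstance is transcoded as a POST,
+ # so the faked transcode result below also carries the request as the body.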
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + + +def test_create_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance=memorystore.Instance(name='name_value'), + instance_id='instance_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_create_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
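+ # Callers should pass either a fully populated CreateInstanceRequest or the
+ # flattened parent/instance/instance_id arguments, never both at once.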
+ with pytest.raises(ValueError): + client.create_instance( + memorystore.CreateInstanceRequest(), + parent='parent_value', + instance=memorystore.Instance(name='name_value'), + instance_id='instance_id_value', + ) + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
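+ # UpdateInstanceRequest has no required string path fields of its own (the
+ # resource name travels inside instance.name), which is why no non-default
+ # values were pinned in the jsonified_request checks above.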
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + + +def test_update_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + instance=memorystore.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_update_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + memorystore.UpdateInstanceRequest(), + instance=memorystore.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + + +def test_delete_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + memorystore.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_certificate_authority in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + + request = {} + client.get_certificate_authority(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_certificate_authority(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_certificate_authority(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_certificate_authority_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_certificate_authority_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_certificate_authority(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + + +def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_certificate_authority( + memorystore.GetCertificateAuthorityRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MemorystoreClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize("transport_class", [ + transports.MemorystoreRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_rest(): + transport = MemorystoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_instances(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
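+ # Populate a couple of scalar fields so the assertions further down can
+ # confirm they survive the JSON round trip through the mocked transport.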
+ return_value = memorystore.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + req.return_value.content = return_value + + request = memorystore.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.ListInstancesResponse() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
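+ # A 400 status with an empty JSON body is enough for google.api_core to map
+ # the response to core_exceptions.BadRequest, which the client re-raises.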
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.Instance( + name='name_value', + state=memorystore.Instance.State.CREATING, + uid='uid_value', + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version='engine_version_value', + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
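+ # Each field populated on the fake response above should be echoed back on
+ # the proto-plus Instance object, including the enum-valued fields.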
+ assert isinstance(response, memorystore.Instance) + assert response.name == 'name_value' + assert response.state == memorystore.Instance.State.CREATING + assert response.uid == 'uid_value' + assert response.replica_count == 1384 + assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert response.shard_count == 1178 + assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO + assert response.engine_version == 'engine_version_value' + assert response.deletion_protection_enabled is True + assert response.mode == memorystore.Instance.Mode.STANDALONE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.Instance.to_json(memorystore.Instance()) + req.return_value.content = return_value + + request = memorystore.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.Instance() + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
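+ # proto-plus message types expose their fields via .meta.fields, while raw
+ # protobuf messages expose them via .DESCRIPTOR.fields; both cases are
+ # handled below.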
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
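+ # create_instance returns a long-running operation future, so the generated
+ # check here only re-serializes the faked Operation rather than asserting on
+ # a concrete resource type.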
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_instance(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.DeleteInstanceRequest, + dict, +]) +def test_delete_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_certificate_authority(request) + + +@pytest.mark.parametrize("request_type", [ + memorystore.GetCertificateAuthorityRequest, + dict, +]) +def test_get_certificate_authority_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_certificate_authority(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, memorystore.CertificateAuthority) + assert response.name == 'name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_certificate_authority_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ + mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + req.return_value.content = return_value + + request = memorystore.GetCertificateAuthorityRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.CertificateAuthority() + + client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_certificate_authority_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_certificate_authority), + '__call__') as call: + client.get_certificate_authority(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetCertificateAuthorityRequest() + + assert args[0] == request_msg + + +def test_memorystore_rest_lro_client(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_memorystore_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MemorystoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_memorystore_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.MemorystoreTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_instances', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'get_certificate_authority', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_memorystore_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MemorystoreTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_memorystore_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MemorystoreTransport() + adc.assert_called_once() + + +def test_memorystore_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MemorystoreClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +def test_memorystore_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MemorystoreRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_host_no_port(transport_name): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'memorystore.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://memorystore.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_host_with_port(transport_name): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'memorystore.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://memorystore.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_memorystore_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MemorystoreClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MemorystoreClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.get_certificate_authority._session + session2 = client2.transport.get_certificate_authority._session + assert session1 != session2 + +def test_certificate_authority_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + actual = MemorystoreClient.certificate_authority_path(project, location, instance) + assert expected == actual + + +def test_parse_certificate_authority_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = 
MemorystoreClient.certificate_authority_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_certificate_authority_path(path) + assert expected == actual + +def test_forwarding_rule_path(): + project = "cuttlefish" + region = "mussel" + forwarding_rule = "winkle" + expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) + assert expected == actual + + +def test_parse_forwarding_rule_path(): + expected = { + "project": "nautilus", + "region": "scallop", + "forwarding_rule": "abalone", + } + path = MemorystoreClient.forwarding_rule_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_forwarding_rule_path(path) + assert expected == actual + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + actual = MemorystoreClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = MemorystoreClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_instance_path(path) + assert expected == actual + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + actual = MemorystoreClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = MemorystoreClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_network_path(path) + assert expected == actual + +def test_service_attachment_path(): + project = "scallop" + region = "abalone" + service_attachment = "squid" + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + assert expected == actual + + +def test_parse_service_attachment_path(): + expected = { + "project": "clam", + "region": "whelk", + "service_attachment": "octopus", + } + path = MemorystoreClient.service_attachment_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_service_attachment_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MemorystoreClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MemorystoreClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MemorystoreClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = MemorystoreClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MemorystoreClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MemorystoreClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MemorystoreClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = MemorystoreClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MemorystoreClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MemorystoreClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MemorystoreClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + transport_class = MemorystoreClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + ] + for transport in transports: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MemorystoreClient, transports.MemorystoreRestTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From b28fd876cbf93f1357cddd86336b90463a23a896 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 26 Nov 2024 12:42:14 +0000 Subject: [PATCH 4/6] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-memorystore/v1/MANIFEST.in | 2 - .../google-cloud-memorystore/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../v1/docs/index.rst | 7 - .../v1/google/cloud/memorystore/__init__.py | 61 - .../google/cloud/memorystore_v1/__init__.py | 62 - .../google-cloud-memorystore/v1/noxfile.py | 280 --- .../google-cloud-memorystore/v1/setup.py | 98 - .../v1beta/.coveragerc | 13 - .../v1beta/MANIFEST.in | 2 - .../v1beta/README.rst | 49 - .../v1beta/docs/_static/custom.css | 3 - .../v1beta/docs/conf.py | 376 --- .../v1beta/docs/index.rst | 7 - .../google/cloud/memorystore/__init__.py | 61 - .../cloud/memorystore_v1beta/__init__.py | 62 - .../cloud/memorystore_v1beta/gapic_version.py | 16 - .../google/cloud/memorystore_v1beta/py.typed | 2 - .../google-cloud-memorystore/v1beta/mypy.ini | 3 - .../v1beta/noxfile.py | 280 --- .../v1beta/testing/constraints-3.10.txt | 6 - .../v1beta/testing/constraints-3.11.txt | 6 - .../v1beta/testing/constraints-3.12.txt | 6 - .../v1beta/testing/constraints-3.13.txt | 6 - .../v1beta/testing/constraints-3.7.txt | 10 - .../v1beta/testing/constraints-3.8.txt | 6 - .../v1beta/testing/constraints-3.9.txt | 6 - .../v1beta/tests/__init__.py | 16 - .../v1beta/tests/unit/__init__.py | 16 - .../v1beta/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/memorystore_v1beta/__init__.py | 16 - .../google-cloud-memorystore}/.coveragerc | 0 .../google-cloud-memorystore}/.flake8 | 4 +- packages/google-cloud-memorystore/.gitignore | 63 + .../google-cloud-memorystore/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../google-cloud-memorystore/CONTRIBUTING.rst | 273 ++ packages/google-cloud-memorystore/LICENSE | 202 ++ .../google-cloud-memorystore/MANIFEST.in | 24 +- packages/google-cloud-memorystore/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../google-cloud-memorystore/docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-memorystore}/docs/conf.py | 72 +- .../google-cloud-memorystore/docs/index.rst | 39 + 
.../docs/memorystore_v1/memorystore.rst | 0 .../docs/memorystore_v1/services_.rst | 0 .../docs/memorystore_v1/types_.rst | 0 .../docs/memorystore_v1beta/memorystore.rst | 0 .../docs/memorystore_v1beta/services_.rst | 0 .../docs/memorystore_v1beta/types_.rst | 0 .../docs/multiprocessing.rst | 7 + .../docs/summary_overview.md | 22 + .../google/cloud/memorystore/__init__.py | 63 + .../google/cloud/memorystore/gapic_version.py | 0 .../google/cloud/memorystore/py.typed | 0 .../google/cloud/memorystore_v1/__init__.py | 63 + .../cloud/memorystore_v1/gapic_metadata.json | 0 .../cloud/memorystore_v1/gapic_version.py | 0 .../google/cloud/memorystore_v1/py.typed | 0 .../cloud/memorystore_v1/services/__init__.py | 0 .../services/memorystore/__init__.py | 4 +- .../services/memorystore/client.py | 561 +++-- .../services/memorystore/pagers.py | 46 +- .../memorystore/transports/README.rst | 0 .../memorystore/transports/__init__.py | 12 +- .../services/memorystore/transports/base.py | 192 +- .../services/memorystore/transports/rest.py | 916 ++++--- .../memorystore/transports/rest_base.py | 344 +-- .../cloud/memorystore_v1/types/__init__.py | 40 +- .../cloud/memorystore_v1/types/memorystore.py | 135 +- .../cloud/memorystore_v1beta/__init__.py | 63 + .../memorystore_v1beta/gapic_metadata.json | 0 .../memorystore_v1beta}/gapic_version.py | 0 .../google/cloud/memorystore_v1beta}/py.typed | 0 .../memorystore_v1beta/services/__init__.py | 0 .../services/memorystore/__init__.py | 4 +- .../services/memorystore/client.py | 561 +++-- .../services/memorystore/pagers.py | 46 +- .../memorystore/transports/README.rst | 0 .../memorystore/transports/__init__.py | 12 +- .../services/memorystore/transports/base.py | 192 +- .../services/memorystore/transports/rest.py | 916 ++++--- .../memorystore/transports/rest_base.py | 344 +-- .../memorystore_v1beta/types/__init__.py | 40 +- .../memorystore_v1beta/types/memorystore.py | 135 +- .../google-cloud-memorystore}/mypy.ini | 0 packages/google-cloud-memorystore/noxfile.py | 460 ++++ ...erated_memorystore_create_instance_sync.py | 0 ...erated_memorystore_delete_instance_sync.py | 0 ...orystore_get_certificate_authority_sync.py | 0 ...generated_memorystore_get_instance_sync.py | 0 ...nerated_memorystore_list_instances_sync.py | 0 ...erated_memorystore_update_instance_sync.py | 0 ...erated_memorystore_create_instance_sync.py | 0 ...erated_memorystore_delete_instance_sync.py | 0 ...orystore_get_certificate_authority_sync.py | 0 ...generated_memorystore_get_instance_sync.py | 0 ...nerated_memorystore_list_instances_sync.py | 0 ...erated_memorystore_update_instance_sync.py | 0 ..._metadata_google.cloud.memorystore.v1.json | 0 ...adata_google.cloud.memorystore.v1beta.json | 0 .../scripts/decrypt-secrets.sh | 46 + .../scripts/fixup_memorystore_v1_keywords.py | 0 .../fixup_memorystore_v1beta_keywords.py | 0 .../google-cloud-memorystore}/setup.py | 13 +- .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 0 .../testing/constraints-3.11.txt | 0 .../testing/constraints-3.12.txt | 0 .../testing/constraints-3.13.txt | 0 .../testing/constraints-3.7.txt | 0 .../testing/constraints-3.8.txt | 0 .../testing/constraints-3.9.txt | 0 .../tests}/__init__.py | 1 - .../tests/unit/__init__.py | 1 - .../tests/unit/gapic}/__init__.py | 1 - .../unit/gapic/memorystore_v1/__init__.py | 1 - .../gapic/memorystore_v1/test_memorystore.py | 2196 ++++++++++------ .../unit/gapic/memorystore_v1beta/__init__.py | 15 + .../memorystore_v1beta/test_memorystore.py | 2200 +++++++++++------ 122 files changed, 7404 
insertions(+), 4750 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/setup.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/README.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/.coveragerc (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/.flake8 (92%) create mode 100644 packages/google-cloud-memorystore/.gitignore create mode 100644 packages/google-cloud-memorystore/CHANGELOG.md create mode 100644 packages/google-cloud-memorystore/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-memorystore/CONTRIBUTING.rst create mode 100644 packages/google-cloud-memorystore/LICENSE rename owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 => packages/google-cloud-memorystore/MANIFEST.in (69%) create mode 100644 
packages/google-cloud-memorystore/README.rst create mode 120000 packages/google-cloud-memorystore/docs/CHANGELOG.md create mode 120000 packages/google-cloud-memorystore/docs/README.rst create mode 100644 packages/google-cloud-memorystore/docs/_static/custom.css create mode 100644 packages/google-cloud-memorystore/docs/_templates/layout.html rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/conf.py (88%) create mode 100644 packages/google-cloud-memorystore/docs/index.rst rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/memorystore.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/services_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/types_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/memorystore.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/services_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/types_.rst (100%) create mode 100644 packages/google-cloud-memorystore/docs/multiprocessing.rst create mode 100644 packages/google-cloud-memorystore/docs/summary_overview.md create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore/py.typed (100%) create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/py.typed (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta => packages/google-cloud-memorystore/google/cloud/memorystore_v1}/services/memorystore/__init__.py (94%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/client.py (81%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/pagers.py (78%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/README.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py (77%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/base.py (70%) rename 
{owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/rest.py (67%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py (59%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/types/__init__.py (69%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/types/memorystore.py (93%) create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/py.typed (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1 => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/services/memorystore/__init__.py (94%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/client.py (81%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/pagers.py (78%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py (77%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py (70%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py (67%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py (59%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/types/__init__.py (69%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/types/memorystore.py (93%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/mypy.ini (100%) create mode 100644 packages/google-cloud-memorystore/noxfile.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => 
packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json (100%) create mode 100755 packages/google-cloud-memorystore/scripts/decrypt-secrets.sh rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/scripts/fixup_memorystore_v1_keywords.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/scripts/fixup_memorystore_v1beta_keywords.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/setup.py (93%) create mode 100644 packages/google-cloud-memorystore/testing/.gitignore rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.10.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.11.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.12.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.13.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.7.txt 
(100%)
rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.8.txt (100%)
rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.9.txt (100%)
rename {owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic => packages/google-cloud-memorystore/tests}/__init__.py (99%)
rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/__init__.py (99%)
rename {owl-bot-staging/google-cloud-memorystore/v1/tests => packages/google-cloud-memorystore/tests/unit/gapic}/__init__.py (99%)
rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1/__init__.py (99%)
rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1/test_memorystore.py (64%)
create mode 100644 packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py
rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1beta/test_memorystore.py (64%)
diff --git a/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in
deleted file mode 100644
index cb2b6f08702d..000000000000
--- a/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/memorystore *.py
-recursive-include google/cloud/memorystore_v1 *.py
diff --git a/owl-bot-staging/google-cloud-memorystore/v1/README.rst b/owl-bot-staging/google-cloud-memorystore/v1/README.rst
deleted file mode 100644
index 6f935a43af2b..000000000000
--- a/owl-bot-staging/google-cloud-memorystore/v1/README.rst
+++ /dev/null
@@ -1,49 +0,0 @@
-Python Client for Google Cloud Memorystore API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Memorystore API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css
deleted file mode 100644
index 06423be0b592..000000000000
--- a/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css
+++ /dev/null
@@ -1,3 +0,0 @@
-dl.field-list > dt {
-    min-width: 100px
-}
diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst
deleted file mode 100644
index bd30847b6cc3..000000000000
--- a/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-API Reference
--------------
-.. toctree::
-    :maxdepth: 2
-
-    memorystore_v1/services_
-    memorystore_v1/types_
diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py
deleted file mode 100644
index 6af94ce66397..000000000000
--- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -from google.cloud.memorystore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient - -from google.cloud.memorystore_v1.types.memorystore import CertificateAuthority -from google.cloud.memorystore_v1.types.memorystore import CreateInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import DeleteInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import DiscoveryEndpoint -from google.cloud.memorystore_v1.types.memorystore import GetCertificateAuthorityRequest -from google.cloud.memorystore_v1.types.memorystore import GetInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import Instance -from google.cloud.memorystore_v1.types.memorystore import ListInstancesRequest -from google.cloud.memorystore_v1.types.memorystore import ListInstancesResponse -from google.cloud.memorystore_v1.types.memorystore import NodeConfig -from google.cloud.memorystore_v1.types.memorystore import OperationMetadata -from google.cloud.memorystore_v1.types.memorystore import PersistenceConfig -from google.cloud.memorystore_v1.types.memorystore import PscAutoConnection -from google.cloud.memorystore_v1.types.memorystore import PscConnection -from google.cloud.memorystore_v1.types.memorystore import UpdateInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import ZoneDistributionConfig -from google.cloud.memorystore_v1.types.memorystore import ConnectionType -from google.cloud.memorystore_v1.types.memorystore import PscConnectionStatus - -__all__ = ('MemorystoreClient', - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py deleted file mode 100644 index 5ad10e820927..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.memorystore_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.memorystore import MemorystoreClient - -from .types.memorystore import CertificateAuthority -from .types.memorystore import CreateInstanceRequest -from .types.memorystore import DeleteInstanceRequest -from .types.memorystore import DiscoveryEndpoint -from .types.memorystore import GetCertificateAuthorityRequest -from .types.memorystore import GetInstanceRequest -from .types.memorystore import Instance -from .types.memorystore import ListInstancesRequest -from .types.memorystore import ListInstancesResponse -from .types.memorystore import NodeConfig -from .types.memorystore import OperationMetadata -from .types.memorystore import PersistenceConfig -from .types.memorystore import PscAutoConnection -from .types.memorystore import PscConnection -from .types.memorystore import UpdateInstanceRequest -from .types.memorystore import ZoneDistributionConfig -from .types.memorystore import ConnectionType -from .types.memorystore import PscConnectionStatus - -__all__ = ( -'CertificateAuthority', -'ConnectionType', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'DiscoveryEndpoint', -'GetCertificateAuthorityRequest', -'GetInstanceRequest', -'Instance', -'ListInstancesRequest', -'ListInstancesResponse', -'MemorystoreClient', -'NodeConfig', -'OperationMetadata', -'PersistenceConfig', -'PscAutoConnection', -'PscConnection', -'PscConnectionStatus', -'UpdateInstanceRequest', -'ZoneDistributionConfig', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py deleted file mode 100644 index f0629be85ae7..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-memorystore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/setup.py b/owl-bot-staging/google-cloud-memorystore/v1/setup.py deleted file mode 100644 index 3cb03e5a17ca..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-memorystore' - - -description = "Google Cloud Memorystore API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - 
"Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc b/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc deleted file mode 100644 index 90ec0ce4fe89..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/memorystore/__init__.py - google/cloud/memorystore/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in deleted file mode 100644 index fa2894ae07c0..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/memorystore *.py -recursive-include google/cloud/memorystore_v1beta *.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst deleted file mode 100644 index 6f935a43af2b..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Memorystore API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Memorystore API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py deleted file mode 100644 index 8d134830ab88..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-memorystore documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. 
-project = u"google-cloud-memorystore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-memorystore-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - ( - root_doc, - "google-cloud-memorystore.tex", - u"google-cloud-memorystore Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-memorystore", - u"Google Cloud Memorystore Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-memorystore", - u"google-cloud-memorystore Documentation", - author, - "google-cloud-memorystore", - "GAPIC library for Google Cloud Memorystore API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst deleted file mode 100644 index 6c5c2af1be0f..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - memorystore_v1beta/services_ - memorystore_v1beta/types_ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py deleted file mode 100644 index df718a2f7777..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memorystore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.memorystore_v1beta.services.memorystore.client import MemorystoreClient - -from google.cloud.memorystore_v1beta.types.memorystore import CertificateAuthority -from google.cloud.memorystore_v1beta.types.memorystore import CreateInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import DeleteInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import DiscoveryEndpoint -from google.cloud.memorystore_v1beta.types.memorystore import GetCertificateAuthorityRequest -from google.cloud.memorystore_v1beta.types.memorystore import GetInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import Instance -from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesRequest -from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesResponse -from google.cloud.memorystore_v1beta.types.memorystore import NodeConfig -from google.cloud.memorystore_v1beta.types.memorystore import OperationMetadata -from google.cloud.memorystore_v1beta.types.memorystore import PersistenceConfig -from google.cloud.memorystore_v1beta.types.memorystore import PscAutoConnection -from google.cloud.memorystore_v1beta.types.memorystore import PscConnection -from google.cloud.memorystore_v1beta.types.memorystore import UpdateInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import ZoneDistributionConfig -from google.cloud.memorystore_v1beta.types.memorystore import ConnectionType -from google.cloud.memorystore_v1beta.types.memorystore import PscConnectionStatus - -__all__ = ('MemorystoreClient', - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py deleted file mode 100644 index 72d6431d3df2..000000000000 --- 
a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memorystore_v1beta import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.memorystore import MemorystoreClient - -from .types.memorystore import CertificateAuthority -from .types.memorystore import CreateInstanceRequest -from .types.memorystore import DeleteInstanceRequest -from .types.memorystore import DiscoveryEndpoint -from .types.memorystore import GetCertificateAuthorityRequest -from .types.memorystore import GetInstanceRequest -from .types.memorystore import Instance -from .types.memorystore import ListInstancesRequest -from .types.memorystore import ListInstancesResponse -from .types.memorystore import NodeConfig -from .types.memorystore import OperationMetadata -from .types.memorystore import PersistenceConfig -from .types.memorystore import PscAutoConnection -from .types.memorystore import PscConnection -from .types.memorystore import UpdateInstanceRequest -from .types.memorystore import ZoneDistributionConfig -from .types.memorystore import ConnectionType -from .types.memorystore import PscConnectionStatus - -__all__ = ( -'CertificateAuthority', -'ConnectionType', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'DiscoveryEndpoint', -'GetCertificateAuthorityRequest', -'GetInstanceRequest', -'Instance', -'ListInstancesRequest', -'ListInstancesResponse', -'MemorystoreClient', -'NodeConfig', -'OperationMetadata', -'PersistenceConfig', -'PscAutoConnection', -'PscConnection', -'PscConnectionStatus', -'UpdateInstanceRequest', -'ZoneDistributionConfig', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed deleted file mode 100644 index 3e10cbb3572e..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini b/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py deleted file mode 100644 index 9d2f5c2af33c..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-memorystore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1beta/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1beta/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.coveragerc b/packages/google-cloud-memorystore/.coveragerc similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/.coveragerc rename to packages/google-cloud-memorystore/.coveragerc diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.flake8 b/packages/google-cloud-memorystore/.flake8 similarity index 92% rename from owl-bot-staging/google-cloud-memorystore/v1/.flake8 rename to packages/google-cloud-memorystore/.flake8 index 29227d4cf419..32986c79287a 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/.flake8 +++ b/packages/google-cloud-memorystore/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-memorystore/.gitignore b/packages/google-cloud-memorystore/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-memorystore/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-memorystore/CHANGELOG.md b/packages/google-cloud-memorystore/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-memorystore/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md b/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-memorystore/CONTRIBUTING.rst b/packages/google-cloud-memorystore/CONTRIBUTING.rst new file mode 100644 index 000000000000..c51aa4cb16ff --- /dev/null +++ b/packages/google-cloud-memorystore/CONTRIBUTING.rst @@ -0,0 +1,273 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
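+
+``nox`` itself is not shipped with this package. As a minimal sketch (assuming
+a recent Python 3 environment), it can be installed from PyPI and its available
+sessions listed with::
+
+    $ python -m pip install nox
+    $ nox --list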
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.13 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. 
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-memorystore + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-memorystore/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-memorystore/LICENSE b/packages/google-cloud-memorystore/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-memorystore/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 b/packages/google-cloud-memorystore/MANIFEST.in similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 rename to packages/google-cloud-memorystore/MANIFEST.in index 29227d4cf419..d6814cd60037 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 +++ b/packages/google-cloud-memorystore/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,19 +15,11 @@ # limitations under the License. # Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-memorystore/README.rst b/packages/google-cloud-memorystore/README.rst new file mode 100644 index 000000000000..c7ac077d4c69 --- /dev/null +++ b/packages/google-cloud-memorystore/README.rst @@ -0,0 +1,108 @@ +Python Client for +================== + +|preview| |pypi| |versions| + +``_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-memorystore.svg + :target: https://pypi.org/project/google-cloud-memorystore/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memorystore.svg + :target: https://pypi.org/project/google-cloud-memorystore/ +.. _: +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_overview +.. _Product Documentation: + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the .`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the .: +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. 
_samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-memorystore + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-memorystore + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for + to see other available methods on the client. +- Read the ` Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _ Product documentation: +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-memorystore/docs/CHANGELOG.md b/packages/google-cloud-memorystore/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-memorystore/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-memorystore/docs/README.rst b/packages/google-cloud-memorystore/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-memorystore/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-memorystore/docs/_static/custom.css b/packages/google-cloud-memorystore/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-memorystore/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-memorystore/docs/_templates/layout.html b/packages/google-cloud-memorystore/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-memorystore/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py b/packages/google-cloud-memorystore/docs/conf.py similarity index 88% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py rename to packages/google-cloud-memorystore/docs/conf.py index 8d134830ab88..1dd800bba097 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py +++ b/packages/google-cloud-memorystore/docs/conf.py @@ -5,7 +5,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# # google-cloud-memorystore documentation build configuration file # # This file is execfile()d with the current directory set to its @@ -25,21 +24,25 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +52,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -78,9 +80,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-memorystore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +project = "google-cloud-memorystore" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +98,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +108,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +154,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-memorystore", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -258,13 +266,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', + #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', + #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # 'preamble': '', + #'preamble': '', # Latex figure (float) alignment - # 'figure_align': 'htbp', + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -274,7 +282,7 @@ ( root_doc, "google-cloud-memorystore.tex", - u"google-cloud-memorystore Documentation", + "google-cloud-memorystore Documentation", author, "manual", ) @@ -309,7 +317,7 @@ ( root_doc, "google-cloud-memorystore", - u"Google Cloud Memorystore Documentation", + "google-cloud-memorystore Documentation", [author], 1, ) @@ -328,10 +336,10 @@ ( root_doc, "google-cloud-memorystore", - u"google-cloud-memorystore Documentation", + "google-cloud-memorystore Documentation", author, "google-cloud-memorystore", - "GAPIC library for Google Cloud Memorystore API", + "google-cloud-memorystore Library", "APIs", ) ] @@ -351,14 +359,14 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/google-cloud-memorystore/docs/index.rst b/packages/google-cloud-memorystore/docs/index.rst new file mode 100644 index 000000000000..e2ce5cdc7aeb --- /dev/null +++ b/packages/google-cloud-memorystore/docs/index.rst @@ -0,0 +1,39 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of . 
+By default, you will get version ``memorystore_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1/services_ + memorystore_v1/types_ + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1beta/services_ + memorystore_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-memorystore`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/memorystore.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/memorystore.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/services_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/types_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/memorystore.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/memorystore.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/services_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/types_.rst diff --git a/packages/google-cloud-memorystore/docs/multiprocessing.rst b/packages/google-cloud-memorystore/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-memorystore/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-memorystore/docs/summary_overview.md b/packages/google-cloud-memorystore/docs/summary_overview.md new file mode 100644 index 000000000000..607b7f1693fc --- /dev/null +++ b/packages/google-cloud-memorystore/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. 
Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# API + +Overview of the APIs available for API. + +## All entries + +Classes, methods and properties & attributes for + API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_property.html) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py new file mode 100644 index 000000000000..a18c9a80f0b2 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient +from google.cloud.memorystore_v1.types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "MemorystoreClient", + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore/py.typed diff --git 
a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py new file mode 100644 index 000000000000..64c6a11772a4 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient +from .types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "CertificateAuthority", + "ConnectionType", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemorystoreClient", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "PscConnectionStatus", + "UpdateInstanceRequest", + "ZoneDistributionConfig", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/py.typed diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py rename to 
packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/__init__.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py similarity index 94% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py index e2240b4bffb7..d1c440dabfa0 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py @@ -15,6 +15,4 @@ # from .client import MemorystoreClient -__all__ = ( - 'MemorystoreClient', -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py similarity index 81% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py index 5dfc5b0d8992..eef12973a155 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.memorystore_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.memorystore_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,14 +50,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.memorystore_v1.services.memorystore import pagers -from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore 
+from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memorystore_v1.services.memorystore import pagers +from google.cloud.memorystore_v1.types import memorystore + +from .transports.base import DEFAULT_CLIENT_INFO, MemorystoreTransport from .transports.rest import MemorystoreRestTransport @@ -56,12 +70,14 @@ class MemorystoreClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] _transport_registry["rest"] = MemorystoreRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MemorystoreTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: """Returns an appropriate transport class. Args: @@ -153,8 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MemorystoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -171,117 +186,193 @@ def transport(self) -> MemorystoreTransport: return self._transport @staticmethod - def certificate_authority_path(project: str,location: str,instance: str,) -> str: + def certificate_authority_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified certificate_authority string.""" - return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_certificate_authority_path(path: str) -> Dict[str,str]: + def parse_certificate_authority_path(path: str) -> Dict[str, str]: """Parses a certificate_authority path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", + path, + ) return m.groupdict() if m else {} @staticmethod - def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str: + def forwarding_rule_path( + project: str, + region: str, + forwarding_rule: str, + ) -> str: """Returns a fully-qualified forwarding_rule string.""" - return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) @staticmethod - def parse_forwarding_rule_path(path: str) -> Dict[str,str]: + def parse_forwarding_rule_path(path: str) -> Dict[str, str]: """Parses a forwarding_rule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", + path, + ) return 
m.groupdict() if m else {} @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_attachment_path(project: str,region: str,service_attachment: str,) -> str: + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: """Returns a fully-qualified service_attachment string.""" - return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) @staticmethod - def parse_service_attachment_path(path: str) -> Dict[str,str]: + def parse_service_attachment_path(path: str) -> Dict[str, str]: """Parses a service_attachment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return 
"folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -313,16 +404,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -335,7 +432,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -356,13 +455,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -385,7 +490,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. 
Args: @@ -401,17 +508,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = MemorystoreClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -464,12 +579,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the memorystore client. 
Args: @@ -524,21 +643,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() - self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MemorystoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -547,8 +678,10 @@ def __init__(self, *, if transport_provided: # transport is a MemorystoreTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -557,20 +690,26 @@ def __init__(self, *, self._transport = cast(MemorystoreTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - MemorystoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + transport_init: Union[ + Type[MemorystoreTransport], Callable[..., MemorystoreTransport] + ] = ( MemorystoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MemorystoreTransport], transport) @@ -588,14 +727,15 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_instances(self, - request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: r"""Lists Instances in a given project and location. .. code-block:: python @@ -655,8 +795,10 @@ def sample_list_instances(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -674,9 +816,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -704,14 +844,15 @@ def sample_list_instances(): # Done; return the response. 
return response - def get_instance(self, - request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.Instance: + def get_instance( + self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Gets details of a single Instance. .. code-block:: python @@ -766,8 +907,10 @@ def sample_get_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -785,9 +928,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -804,16 +945,17 @@ def sample_get_instance(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[memorystore.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Instance in a given project and location. @@ -907,8 +1049,10 @@ def sample_create_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -930,9 +1074,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -957,15 +1099,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[memorystore.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Updates the parameters of a single Instance. .. code-block:: python @@ -1039,8 +1182,10 @@ def sample_update_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1060,9 +1205,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1087,14 +1232,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Instance. .. code-block:: python @@ -1165,8 +1311,10 @@ def sample_delete_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1184,9 +1332,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1211,14 +1357,17 @@ def sample_delete_instance(): # Done; return the response. return response - def get_certificate_authority(self, - request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.CertificateAuthority: + def get_certificate_authority( + self, + request: Optional[ + Union[memorystore.GetCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Gets details about the certificate authority for an Instance. @@ -1277,8 +1426,10 @@ def sample_get_certificate_authority(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1291,14 +1442,14 @@ def sample_get_certificate_authority(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + rpc = self._transport._wrapped_methods[ + self._transport.get_certificate_authority + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1364,8 +1515,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1373,7 +1523,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1414,8 +1568,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1423,7 +1576,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1468,15 +1625,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1517,15 +1678,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -1563,8 +1728,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1572,7 +1736,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1613,8 +1781,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1622,15 +1789,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "MemorystoreClient", -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py similarity index 78% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py index 025c07bfda1e..027706df288d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., memorystore.ListInstancesResponse], - request: memorystore.ListInstancesRequest, - response: memorystore.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -82,7 +99,12 @@ def pages(self) -> Iterator[memorystore.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[memorystore.Instance]: @@ -90,4 +112,4 @@ def __iter__(self) -> Iterator[memorystore.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/README.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/README.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py similarity index 77% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py index 
6172c94a25d4..a1f2673d9bb5 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import MemorystoreTransport -from .rest import MemorystoreRestTransport -from .rest import MemorystoreRestInterceptor - +from .rest import MemorystoreRestInterceptor, MemorystoreRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] -_transport_registry['rest'] = MemorystoreRestTransport +_transport_registry["rest"] = MemorystoreRestTransport __all__ = ( - 'MemorystoreTransport', - 'MemorystoreRestTransport', - 'MemorystoreRestInterceptor', + "MemorystoreTransport", + "MemorystoreRestTransport", + "MemorystoreRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py similarity index 70% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py index 216396a3b63e..e81e20865449 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py @@ -16,44 +16,44 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.memorystore_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1 import gapic_version as package_version from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MemorystoreTransport(abc.ABC): """Abstract transport class for Memorystore.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "memorystore.googleapis.com" - DEFAULT_HOST: str = 'memorystore.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -89,30 +89,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -209,14 +217,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -226,57 +234,63 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - Union[ - memorystore.ListInstancesResponse, - Awaitable[memorystore.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - Union[ - memorystore.Instance, - Awaitable[memorystore.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [memorystore.GetInstanceRequest], + Union[memorystore.Instance, Awaitable[memorystore.Instance]], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - Union[ - memorystore.CertificateAuthority, - Awaitable[memorystore.CertificateAuthority] - ]]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority], + ], + ]: raise NotImplementedError() @property @@ -284,7 +298,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -300,23 +317,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -324,10 +336,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - 
Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -336,6 +352,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MemorystoreTransport', -) +__all__ = ("MemorystoreTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py similarity index 67% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py index d990c453e173..f28f5839be6a 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py @@ -14,31 +14,25 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseMemorystoreRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMemorystoreRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -121,7 +115,12 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + + def pre_create_instance( + self, + request: memorystore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -129,7 +128,9 @@ def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def 
post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance Override in a subclass to manipulate the response @@ -138,7 +139,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_delete_instance( + self, + request: memorystore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -146,7 +151,9 @@ def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance Override in a subclass to manipulate the response @@ -155,7 +162,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + def pre_get_certificate_authority( + self, + request: memorystore.GetCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the request or metadata @@ -163,7 +174,9 @@ def pre_get_certificate_authority(self, request: memorystore.GetCertificateAutho """ return request, metadata - def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + def post_get_certificate_authority( + self, response: memorystore.CertificateAuthority + ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the response @@ -172,7 +185,11 @@ def post_get_certificate_authority(self, response: memorystore.CertificateAuthor """ return response - def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance( + self, + request: memorystore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -189,7 +206,11 @@ def post_get_instance(self, response: memorystore.Instance) -> memorystore.Insta """ return response - def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + def pre_list_instances( + self, + request: memorystore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the 
request or metadata @@ -197,7 +218,9 @@ def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + def post_list_instances( + self, response: memorystore.ListInstancesResponse + ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances Override in a subclass to manipulate the response @@ -206,7 +229,11 @@ def post_list_instances(self, response: memorystore.ListInstancesResponse) -> me """ return response - def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_update_instance( + self, + request: memorystore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -214,7 +241,9 @@ def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance Override in a subclass to manipulate the response @@ -224,7 +253,9 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -245,7 +276,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -266,7 +299,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -275,9 +310,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -287,7 +320,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -296,9 +331,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> 
None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -308,7 +341,9 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -329,7 +364,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -369,20 +406,21 @@ class MemorystoreRestTransport(_BaseMemorystoreRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MemorystoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -425,10 +463,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -445,46 +484,51 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + class _CreateInstance( + _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CreateInstance") @@ -496,27 +540,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -536,17 +582,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -559,7 +621,9 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) return resp - class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + class _DeleteInstance( + _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteInstance") @@ -571,26 +635,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -610,15 +676,28 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -631,7 +710,9 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) return resp - class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + class _GetCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetCertificateAuthority") @@ -643,26 +724,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetCertificateAuthorityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.CertificateAuthority: + def __call__( + self, + request: memorystore.GetCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Call the get certificate authority method over HTTP. 
Args: @@ -681,15 +764,30 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() - request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + ) + request, metadata = self._interceptor.pre_get_certificate_authority( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -704,7 +802,9 @@ def __call__(self, resp = self._interceptor.post_get_certificate_authority(resp) return resp - class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + class _GetInstance( + _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetInstance") @@ -716,26 +816,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.Instance: + def __call__( + self, + request: memorystore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Call the get instance method over HTTP. Args: @@ -752,15 +854,32 @@ def __call__(self, A Memorystore instance. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -775,7 +894,9 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp - class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + class _ListInstances( + _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListInstances") @@ -787,26 +908,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.ListInstancesResponse: + def __call__( + self, + request: memorystore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -823,15 +946,30 @@ def __call__(self, Response message for [ListInstances][]. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -846,7 +984,9 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp - class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + class _UpdateInstance( + _BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.UpdateInstance") @@ -858,27 +998,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -898,17 +1040,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -922,58 +1080,64 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[memorystore.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[memorystore.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - memorystore.CertificateAuthority]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], memorystore.CertificateAuthority + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - memorystore.Instance]: + def get_instance( + self, + ) -> Callable[[memorystore.GetInstanceRequest], memorystore.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - memorystore.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], memorystore.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[memorystore.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + class _GetLocation( + _BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetLocation") @@ -985,27 +1149,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. 
Args: @@ -1021,15 +1186,32 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1044,9 +1226,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + class _ListLocations( + _BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListLocations") @@ -1058,27 +1242,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1094,15 +1279,30 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1117,9 +1317,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + class _CancelOperation( + _BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CancelOperation") @@ -1131,27 +1333,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1164,15 +1367,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1183,9 +1401,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + class _DeleteOperation( + _BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteOperation") @@ -1197,27 +1417,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1230,15 +1451,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1249,9 +1485,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + class _GetOperation( + _BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetOperation") @@ -1263,27 +1501,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1299,15 +1538,32 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1322,9 +1578,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + class _ListOperations( + _BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListOperations") @@ -1336,27 +1594,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1372,15 +1631,28 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1401,6 +1673,4 @@ def close(self): self._session.close() -__all__=( - 'MemorystoreRestTransport', -) +__all__ = ("MemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py similarity index 59% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py index 3155d66c201d..b7db715096ee 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py @@ -14,19 +14,17 @@ # limitations under the License. 
#
 import json  # type: ignore
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.cloud.location import locations_pb2 # type: ignore
-from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO
-
 import re
 from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
 
+from google.api_core import gapic_v1, path_template
+from google.cloud.location import locations_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import json_format
 
 from google.cloud.memorystore_v1.types import memorystore
-from google.longrunning import operations_pb2  # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO, MemorystoreTransport
 
 
 class _BaseMemorystoreRestTransport(MemorystoreTransport):
@@ -42,14 +40,16 @@ class _BaseMemorystoreRestTransport(MemorystoreTransport):
     It sends JSON representations of protocol buffers over HTTP/1.1
     """
 
-    def __init__(self, *,
-            host: str = 'memorystore.googleapis.com',
-            credentials: Optional[Any] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "memorystore.googleapis.com",
+        credentials: Optional[Any] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.
         Args:
             host (Optional[str]):
@@ -73,7 +73,9 @@ def __init__(self, *,
        # Run the base constructor
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
 
        url_match_items = maybe_url_match.groupdict()
 
@@ -84,27 +86,33 @@ def __init__(self, *,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
+            api_audience=api_audience,
        )
 
    class _BaseCreateInstance:
        def __hash__(self):  # pragma: NO COVER
            return NotImplementedError("__hash__ must be implemented.")
 
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
-            "instanceId" : "",        }
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "instanceId": "",
+        }
 
        @classmethod
        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
 
        @staticmethod
        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{parent=projects/*/locations/*}/instances',
-                'body': 'instance',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/instances",
+                    "body": "instance",
+                },
            ]
            return http_options
 
@@ -119,17 +127,23 @@ def _get_request_body_json(transcoded_request):
 
            # Jsonify the request body
            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
+                transcoded_request["body"], use_integers_for_enums=True
            )
            return body
 
+
        @staticmethod
        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-
use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -138,19 +152,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -162,11 +180,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -175,19 +199,23 @@ class _BaseGetCertificateAuthority: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority", + }, ] return http_options @@ -199,11 +227,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -212,19 +246,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -236,11 +274,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -249,19 +293,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -273,11 +321,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -286,20 +340,24 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -314,17 +372,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -335,23 +399,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -360,23 +424,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -385,23 +449,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -410,23 +474,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -435,23 +499,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -460,26 +524,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseMemorystoreRestTransport', -) +__all__ = ("_BaseMemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py index fc13543f2db2..a639afb23c9d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py @@ -15,6 +15,7 @@ # from .memorystore import ( CertificateAuthority, + ConnectionType, CreateInstanceRequest, DeleteInstanceRequest, DiscoveryEndpoint, @@ -28,29 +29,28 @@ PersistenceConfig, PscAutoConnection, PscConnection, + PscConnectionStatus, UpdateInstanceRequest, ZoneDistributionConfig, - ConnectionType, - PscConnectionStatus, ) __all__ = ( - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py index ed348f00d027..840f7254c853 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py @@ -17,33 +17,31 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.memorystore.v1', + package="google.cloud.memorystore.v1", manifest={ - 'PscConnectionStatus', - 'ConnectionType', - 'Instance', - 'PscAutoConnection', - 'PscConnection', - 'DiscoveryEndpoint', - 'PersistenceConfig', - 'NodeConfig', - 'ZoneDistributionConfig', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'GetCertificateAuthorityRequest', - 'CertificateAuthority', - 'OperationMetadata', + "PscConnectionStatus", + "ConnectionType", + "Instance", + "PscAutoConnection", + 
"PscConnection", + "DiscoveryEndpoint", + "PersistenceConfig", + "NodeConfig", + "ZoneDistributionConfig", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "GetCertificateAuthorityRequest", + "CertificateAuthority", + "OperationMetadata", }, ) @@ -162,6 +160,7 @@ class Instance(proto.Message): mode (google.cloud.memorystore_v1.types.Instance.Mode): Optional. The mode config for the instance. """ + class State(proto.Enum): r"""Possible states of the instance. @@ -295,11 +294,11 @@ class UpdateInfo(proto.Message): optional=True, ) - update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + update_info: "Instance.StateInfo.UpdateInfo" = proto.Field( proto.MESSAGE, number=1, - oneof='info', - message='Instance.StateInfo.UpdateInfo', + oneof="info", + message="Instance.StateInfo.UpdateInfo", ) class InstanceEndpoint(proto.Message): @@ -315,10 +314,10 @@ class InstanceEndpoint(proto.Message): each service attachment in the cluster. """ - connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + connections: MutableSequence["Instance.ConnectionDetail"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance.ConnectionDetail', + message="Instance.ConnectionDetail", ) class ConnectionDetail(proto.Message): @@ -345,17 +344,17 @@ class ConnectionDetail(proto.Message): This field is a member of `oneof`_ ``connection``. """ - psc_auto_connection: 'PscAutoConnection' = proto.Field( + psc_auto_connection: "PscAutoConnection" = proto.Field( proto.MESSAGE, number=1, - oneof='connection', - message='PscAutoConnection', + oneof="connection", + message="PscAutoConnection", ) - psc_connection: 'PscConnection' = proto.Field( + psc_connection: "PscConnection" = proto.Field( proto.MESSAGE, number=2, - oneof='connection', - message='PscConnection', + oneof="connection", + message="PscConnection", ) name: str = proto.Field( @@ -410,20 +409,20 @@ class ConnectionDetail(proto.Message): proto.INT32, number=11, ) - discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + discovery_endpoints: MutableSequence["DiscoveryEndpoint"] = proto.RepeatedField( proto.MESSAGE, number=12, - message='DiscoveryEndpoint', + message="DiscoveryEndpoint", ) node_type: NodeType = proto.Field( proto.ENUM, number=13, enum=NodeType, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=14, - message='PersistenceConfig', + message="PersistenceConfig", ) engine_version: str = proto.Field( proto.STRING, @@ -434,25 +433,25 @@ class ConnectionDetail(proto.Message): proto.STRING, number=16, ) - node_config: 'NodeConfig' = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=17, - message='NodeConfig', + message="NodeConfig", ) - zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( proto.MESSAGE, number=18, - message='ZoneDistributionConfig', + message="ZoneDistributionConfig", ) deletion_protection_enabled: bool = proto.Field( proto.BOOL, number=19, optional=True, ) - psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='PscAutoConnection', + message="PscAutoConnection", ) endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( 
proto.MESSAGE, @@ -514,7 +513,7 @@ class PscAutoConnection(proto.Message): port: int = proto.Field( proto.INT32, number=9, - oneof='ports', + oneof="ports", ) psc_connection_id: str = proto.Field( proto.STRING, @@ -540,15 +539,15 @@ class PscAutoConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -613,15 +612,15 @@ class PscConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -669,6 +668,7 @@ class PersistenceConfig(proto.Message): Optional. AOF configuration. This field will be ignored if mode is not AOF. """ + class PersistenceMode(proto.Enum): r"""Possible persistence modes. @@ -700,6 +700,7 @@ class RDBConfig(proto.Message): snapshots will be aligned. If not provided, the current time will be used. """ + class SnapshotPeriod(proto.Enum): r"""Possible snapshot periods. @@ -721,10 +722,10 @@ class SnapshotPeriod(proto.Enum): TWELVE_HOURS = 3 TWENTY_FOUR_HOURS = 4 - rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + rdb_snapshot_period: "PersistenceConfig.RDBConfig.SnapshotPeriod" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + enum="PersistenceConfig.RDBConfig.SnapshotPeriod", ) rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -739,6 +740,7 @@ class AOFConfig(proto.Message): append_fsync (google.cloud.memorystore_v1.types.PersistenceConfig.AOFConfig.AppendFsync): Optional. The fsync mode. """ + class AppendFsync(proto.Enum): r"""Possible fsync modes. @@ -762,10 +764,10 @@ class AppendFsync(proto.Enum): EVERY_SEC = 2 ALWAYS = 3 - append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + append_fsync: "PersistenceConfig.AOFConfig.AppendFsync" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.AOFConfig.AppendFsync', + enum="PersistenceConfig.AOFConfig.AppendFsync", ) mode: PersistenceMode = proto.Field( @@ -811,6 +813,7 @@ class ZoneDistributionConfig(proto.Message): Optional. Current zone distribution mode. Defaults to MULTI_ZONE. """ + class ZoneDistributionMode(proto.Enum): r"""Possible zone distribution modes. 
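# --- Editor's illustrative sketch (not part of the patch above) ---
# The hunks above only reformat the generated proto-plus types (quote style,
# import order, blank lines); behavior is unchanged. As a hedged example of
# how these types are typically constructed, assuming the standard proto-plus
# keyword-argument constructor and only the field/enum names visible in this
# diff (rdb_snapshot_period, SnapshotPeriod.TWENTY_FOUR_HOURS, engine_version,
# deletion_protection_enabled). Literal values below are placeholders, not
# taken from the patch.
from google.cloud import memorystore_v1

rdb = memorystore_v1.PersistenceConfig.RDBConfig(
    rdb_snapshot_period=(
        memorystore_v1.PersistenceConfig.RDBConfig.SnapshotPeriod.TWENTY_FOUR_HOURS
    ),
)
instance = memorystore_v1.Instance(
    engine_version="placeholder-version",  # placeholder value
    deletion_protection_enabled=True,
)
# --- end editor's sketch ---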
@@ -904,10 +907,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -986,10 +989,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1034,10 +1037,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1142,16 +1145,18 @@ class CertChain(proto.Message): number=1, ) - ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", ) managed_server_ca: ManagedCertificateAuthority = proto.Field( proto.MESSAGE, number=2, - oneof='server_ca', + oneof="server_ca", message=ManagedCertificateAuthority, ) name: str = proto.Field( diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..f787aca084c1 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.memorystore_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient +from .types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "CertificateAuthority", + "ConnectionType", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemorystoreClient", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "PscConnectionStatus", + "UpdateInstanceRequest", + "ZoneDistributionConfig", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/py.typed diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/__init__.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py similarity index 94% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py index e2240b4bffb7..d1c440dabfa0 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py @@ -15,6 +15,4 @@ # from .client import MemorystoreClient -__all__ = ( - 'MemorystoreClient', 
-) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py similarity index 81% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py index f543e438492b..546c4c14b9af 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.memorystore_v1beta import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.memorystore_v1beta import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,14 +50,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.memorystore_v1beta.services.memorystore import pagers -from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memorystore_v1beta.services.memorystore import pagers +from google.cloud.memorystore_v1beta.types import memorystore + +from .transports.base import DEFAULT_CLIENT_INFO, MemorystoreTransport from .transports.rest import MemorystoreRestTransport @@ -56,12 +70,14 @@ class MemorystoreClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] _transport_registry["rest"] = MemorystoreRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MemorystoreTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: """Returns an appropriate transport class. Args: @@ -153,8 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MemorystoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -171,117 +186,193 @@ def transport(self) -> MemorystoreTransport: return self._transport @staticmethod - def certificate_authority_path(project: str,location: str,instance: str,) -> str: + def certificate_authority_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified certificate_authority string.""" - return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_certificate_authority_path(path: str) -> Dict[str,str]: + def parse_certificate_authority_path(path: str) -> Dict[str, str]: """Parses a certificate_authority path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", + path, + ) return m.groupdict() if m else {} @staticmethod - def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str: + def forwarding_rule_path( + project: str, + region: str, + forwarding_rule: str, + ) -> str: """Returns a fully-qualified forwarding_rule string.""" - return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) @staticmethod - def parse_forwarding_rule_path(path: str) -> Dict[str,str]: + def parse_forwarding_rule_path(path: str) -> Dict[str, str]: """Parses a forwarding_rule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component 
segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_attachment_path(project: str,region: str,service_attachment: str,) -> str: + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: """Returns a fully-qualified service_attachment string.""" - return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) @staticmethod - def parse_service_attachment_path(path: str) -> Dict[str,str]: + def parse_service_attachment_path(path: str) -> Dict[str, str]: """Parses a service_attachment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + 
organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -313,16 +404,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -335,7 +432,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -356,13 +455,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -385,7 +490,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. Args: @@ -401,17 +508,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = MemorystoreClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. 
Args: @@ -464,12 +579,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the memorystore client. Args: @@ -524,21 +643,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() - self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MemorystoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -547,8 +678,10 @@ def __init__(self, *, if transport_provided: # transport is a MemorystoreTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -557,20 +690,26 @@ def __init__(self, *, self._transport = cast(MemorystoreTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - MemorystoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + transport_init: Union[ + Type[MemorystoreTransport], Callable[..., MemorystoreTransport] + ] = ( MemorystoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MemorystoreTransport], transport) @@ -588,14 +727,15 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_instances(self, - request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: r"""Lists Instances in a given project and location. .. code-block:: python @@ -655,8 +795,10 @@ def sample_list_instances(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -674,9 +816,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -704,14 +844,15 @@ def sample_list_instances(): # Done; return the response. 
return response - def get_instance(self, - request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.Instance: + def get_instance( + self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Gets details of a single Instance. .. code-block:: python @@ -766,8 +907,10 @@ def sample_get_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -785,9 +928,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -804,16 +945,17 @@ def sample_get_instance(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[memorystore.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Instance in a given project and location. @@ -907,8 +1049,10 @@ def sample_create_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -930,9 +1074,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
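# --- Editor's illustrative sketch (not part of the patch above) ---
# The surrounding hunks only re-wrap the generated v1beta client methods for
# Black formatting. A hedged usage sketch of the create_instance surface shown
# above, assuming Application Default Credentials are available at runtime;
# the project, location, and instance id are placeholders. create_instance
# returns a google.api_core.operation.Operation, so result() blocks until the
# long-running operation completes.
from google.cloud import memorystore_v1beta

client = memorystore_v1beta.MemorystoreClient()
operation = client.create_instance(
    parent="projects/my-project/locations/us-central1",  # placeholder parent
    instance=memorystore_v1beta.Instance(),
    instance_id="my-instance",  # placeholder id
)
response = operation.result()  # wait for the LRO to finish
# --- end editor's sketch ---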
@@ -957,15 +1099,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[memorystore.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Updates the parameters of a single Instance. .. code-block:: python @@ -1039,8 +1182,10 @@ def sample_update_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1060,9 +1205,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1087,14 +1232,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Instance. .. code-block:: python @@ -1165,8 +1311,10 @@ def sample_delete_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1184,9 +1332,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1211,14 +1357,17 @@ def sample_delete_instance(): # Done; return the response. return response - def get_certificate_authority(self, - request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.CertificateAuthority: + def get_certificate_authority( + self, + request: Optional[ + Union[memorystore.GetCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Gets details about the certificate authority for an Instance. @@ -1277,8 +1426,10 @@ def sample_get_certificate_authority(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1291,14 +1442,14 @@ def sample_get_certificate_authority(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + rpc = self._transport._wrapped_methods[ + self._transport.get_certificate_authority + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1364,8 +1515,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1373,7 +1523,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1414,8 +1568,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1423,7 +1576,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1468,15 +1625,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1517,15 +1678,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -1563,8 +1728,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1572,7 +1736,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1613,8 +1781,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1622,15 +1789,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "MemorystoreClient", -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py similarity index 78% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py index efff474686e6..f793f8b5f67c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., memorystore.ListInstancesResponse], - request: memorystore.ListInstancesRequest, - response: memorystore.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -82,7 +99,12 @@ def pages(self) -> Iterator[memorystore.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[memorystore.Instance]: @@ -90,4 +112,4 @@ def __iter__(self) -> Iterator[memorystore.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py similarity index 77% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py rename to 
packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py index 6172c94a25d4..a1f2673d9bb5 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import MemorystoreTransport -from .rest import MemorystoreRestTransport -from .rest import MemorystoreRestInterceptor - +from .rest import MemorystoreRestInterceptor, MemorystoreRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] -_transport_registry['rest'] = MemorystoreRestTransport +_transport_registry["rest"] = MemorystoreRestTransport __all__ = ( - 'MemorystoreTransport', - 'MemorystoreRestTransport', - 'MemorystoreRestInterceptor', + "MemorystoreTransport", + "MemorystoreRestTransport", + "MemorystoreRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py similarity index 70% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py index 5b8147a83f2f..adfd6080032c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py @@ -16,44 +16,44 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.memorystore_v1beta import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1beta import gapic_version as package_version from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MemorystoreTransport(abc.ABC): """Abstract transport class for Memorystore.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "memorystore.googleapis.com" - DEFAULT_HOST: str = 'memorystore.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: 
Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -89,30 +89,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -209,14 +217,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -226,57 +234,63 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - Union[ - memorystore.ListInstancesResponse, - Awaitable[memorystore.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - Union[ - memorystore.Instance, - Awaitable[memorystore.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [memorystore.GetInstanceRequest], + Union[memorystore.Instance, Awaitable[memorystore.Instance]], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - Union[ - memorystore.CertificateAuthority, - Awaitable[memorystore.CertificateAuthority] - ]]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority], + ], + ]: raise NotImplementedError() @property @@ -284,7 +298,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -300,23 +317,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -324,10 +336,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - 
Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -336,6 +352,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MemorystoreTransport', -) +__all__ = ("MemorystoreTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py similarity index 67% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py index 74bfb95d397d..6cbe62d803ff 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py @@ -14,31 +14,25 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseMemorystoreRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMemorystoreRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -121,7 +115,12 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + + def pre_create_instance( + self, + request: memorystore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -129,7 +128,9 @@ def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> 
operations_pb2.Operation: + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance Override in a subclass to manipulate the response @@ -138,7 +139,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_delete_instance( + self, + request: memorystore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -146,7 +151,9 @@ def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance Override in a subclass to manipulate the response @@ -155,7 +162,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + def pre_get_certificate_authority( + self, + request: memorystore.GetCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the request or metadata @@ -163,7 +174,9 @@ def pre_get_certificate_authority(self, request: memorystore.GetCertificateAutho """ return request, metadata - def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + def post_get_certificate_authority( + self, response: memorystore.CertificateAuthority + ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the response @@ -172,7 +185,11 @@ def post_get_certificate_authority(self, response: memorystore.CertificateAuthor """ return response - def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance( + self, + request: memorystore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -189,7 +206,11 @@ def post_get_instance(self, response: memorystore.Instance) -> memorystore.Insta """ return response - def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + def pre_list_instances( + self, + request: memorystore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_instances Override in 
a subclass to manipulate the request or metadata @@ -197,7 +218,9 @@ def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + def post_list_instances( + self, response: memorystore.ListInstancesResponse + ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances Override in a subclass to manipulate the response @@ -206,7 +229,11 @@ def post_list_instances(self, response: memorystore.ListInstancesResponse) -> me """ return response - def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_update_instance( + self, + request: memorystore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -214,7 +241,9 @@ def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance Override in a subclass to manipulate the response @@ -224,7 +253,9 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -245,7 +276,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -266,7 +299,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -275,9 +310,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -287,7 +320,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -296,9 +331,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - 
self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -308,7 +341,9 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -329,7 +364,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -369,20 +406,21 @@ class MemorystoreRestTransport(_BaseMemorystoreRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MemorystoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -425,10 +463,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -445,46 +484,51 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + class _CreateInstance( + _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CreateInstance") @@ -496,27 +540,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -536,17 +582,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -559,7 +621,9 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) return resp - class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + class _DeleteInstance( + _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteInstance") @@ -571,26 +635,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -610,15 +676,28 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -631,7 +710,9 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) return resp - class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + class _GetCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetCertificateAuthority") @@ -643,26 +724,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetCertificateAuthorityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.CertificateAuthority: + def __call__( + self, + request: memorystore.GetCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Call the get certificate authority method over HTTP. 
Args: @@ -681,15 +764,30 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() - request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + ) + request, metadata = self._interceptor.pre_get_certificate_authority( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -704,7 +802,9 @@ def __call__(self, resp = self._interceptor.post_get_certificate_authority(resp) return resp - class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + class _GetInstance( + _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetInstance") @@ -716,26 +816,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.Instance: + def __call__( + self, + request: memorystore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Call the get instance method over HTTP. Args: @@ -752,15 +854,32 @@ def __call__(self, A Memorystore instance. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -775,7 +894,9 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp - class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + class _ListInstances( + _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListInstances") @@ -787,26 +908,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.ListInstancesResponse: + def __call__( + self, + request: memorystore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -823,15 +946,30 @@ def __call__(self, Response message for [ListInstances][]. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -846,7 +984,9 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp - class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + class _UpdateInstance( + _BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.UpdateInstance") @@ -858,27 +998,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -898,17 +1040,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -922,58 +1080,64 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[memorystore.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[memorystore.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - memorystore.CertificateAuthority]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], memorystore.CertificateAuthority + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - memorystore.Instance]: + def get_instance( + self, + ) -> Callable[[memorystore.GetInstanceRequest], memorystore.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - memorystore.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], memorystore.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[memorystore.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + class _GetLocation( + _BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetLocation") @@ -985,27 +1149,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. 
Args: @@ -1021,15 +1186,32 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1044,9 +1226,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + class _ListLocations( + _BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListLocations") @@ -1058,27 +1242,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1094,15 +1279,30 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1117,9 +1317,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + class _CancelOperation( + _BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CancelOperation") @@ -1131,27 +1333,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1164,15 +1367,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1183,9 +1401,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + class _DeleteOperation( + _BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteOperation") @@ -1197,27 +1417,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1230,15 +1451,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1249,9 +1485,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + class _GetOperation( + _BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetOperation") @@ -1263,27 +1501,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1299,15 +1538,32 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1322,9 +1578,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + class _ListOperations( + _BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListOperations") @@ -1336,27 +1594,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1372,15 +1631,28 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1401,6 +1673,4 @@ def close(self): self._session.close() -__all__=( - 'MemorystoreRestTransport', -) +__all__ = ("MemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py similarity index 59% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py index 7374ad3d00d2..6c2af86b90d1 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py @@ -14,19 +14,17 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, MemorystoreTransport class _BaseMemorystoreRestTransport(MemorystoreTransport): @@ -42,14 +40,16 @@ class _BaseMemorystoreRestTransport(MemorystoreTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -73,7 +73,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,27 +86,33 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,17 +127,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -138,19 +152,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -162,11 +180,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -175,19 +199,23 @@ class _BaseGetCertificateAuthority: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority", + }, ] return http_options @@ -199,11 +227,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -212,19 +246,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -236,11 +274,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -249,19 +293,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -273,11 +321,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -286,20 +340,24 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
@classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1beta/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -314,17 +372,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -335,23 +399,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -360,23 +424,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -385,23 +449,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'post', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -410,23 +474,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -435,23 +499,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -460,26 +524,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseMemorystoreRestTransport', -) +__all__ = ("_BaseMemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py index fc13543f2db2..a639afb23c9d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py @@ -15,6 +15,7 @@ # from .memorystore import ( CertificateAuthority, + ConnectionType, CreateInstanceRequest, DeleteInstanceRequest, DiscoveryEndpoint, @@ -28,29 +29,28 @@ PersistenceConfig, PscAutoConnection, PscConnection, + PscConnectionStatus, UpdateInstanceRequest, ZoneDistributionConfig, - ConnectionType, - PscConnectionStatus, ) __all__ = ( - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py index dd56d5678ceb..4cfef649dfae 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py @@ -17,33 +17,31 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.memorystore.v1beta', + package="google.cloud.memorystore.v1beta", manifest={ - 'PscConnectionStatus', - 'ConnectionType', - 'Instance', - 'PscAutoConnection', - 'PscConnection', - 'DiscoveryEndpoint', - 'PersistenceConfig', - 'NodeConfig', - 'ZoneDistributionConfig', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'GetCertificateAuthorityRequest', - 'CertificateAuthority', - 'OperationMetadata', + 
"PscConnectionStatus", + "ConnectionType", + "Instance", + "PscAutoConnection", + "PscConnection", + "DiscoveryEndpoint", + "PersistenceConfig", + "NodeConfig", + "ZoneDistributionConfig", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "GetCertificateAuthorityRequest", + "CertificateAuthority", + "OperationMetadata", }, ) @@ -162,6 +160,7 @@ class Instance(proto.Message): mode (google.cloud.memorystore_v1beta.types.Instance.Mode): Optional. The mode config for the instance. """ + class State(proto.Enum): r"""Possible states of the instance. @@ -295,11 +294,11 @@ class UpdateInfo(proto.Message): optional=True, ) - update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + update_info: "Instance.StateInfo.UpdateInfo" = proto.Field( proto.MESSAGE, number=1, - oneof='info', - message='Instance.StateInfo.UpdateInfo', + oneof="info", + message="Instance.StateInfo.UpdateInfo", ) class InstanceEndpoint(proto.Message): @@ -315,10 +314,10 @@ class InstanceEndpoint(proto.Message): each service attachment in the cluster. """ - connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + connections: MutableSequence["Instance.ConnectionDetail"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance.ConnectionDetail', + message="Instance.ConnectionDetail", ) class ConnectionDetail(proto.Message): @@ -345,17 +344,17 @@ class ConnectionDetail(proto.Message): This field is a member of `oneof`_ ``connection``. """ - psc_auto_connection: 'PscAutoConnection' = proto.Field( + psc_auto_connection: "PscAutoConnection" = proto.Field( proto.MESSAGE, number=1, - oneof='connection', - message='PscAutoConnection', + oneof="connection", + message="PscAutoConnection", ) - psc_connection: 'PscConnection' = proto.Field( + psc_connection: "PscConnection" = proto.Field( proto.MESSAGE, number=2, - oneof='connection', - message='PscConnection', + oneof="connection", + message="PscConnection", ) name: str = proto.Field( @@ -410,20 +409,20 @@ class ConnectionDetail(proto.Message): proto.INT32, number=11, ) - discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + discovery_endpoints: MutableSequence["DiscoveryEndpoint"] = proto.RepeatedField( proto.MESSAGE, number=12, - message='DiscoveryEndpoint', + message="DiscoveryEndpoint", ) node_type: NodeType = proto.Field( proto.ENUM, number=13, enum=NodeType, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=14, - message='PersistenceConfig', + message="PersistenceConfig", ) engine_version: str = proto.Field( proto.STRING, @@ -434,25 +433,25 @@ class ConnectionDetail(proto.Message): proto.STRING, number=16, ) - node_config: 'NodeConfig' = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=17, - message='NodeConfig', + message="NodeConfig", ) - zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( proto.MESSAGE, number=18, - message='ZoneDistributionConfig', + message="ZoneDistributionConfig", ) deletion_protection_enabled: bool = proto.Field( proto.BOOL, number=19, optional=True, ) - psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='PscAutoConnection', + 
message="PscAutoConnection", ) endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( proto.MESSAGE, @@ -514,7 +513,7 @@ class PscAutoConnection(proto.Message): port: int = proto.Field( proto.INT32, number=9, - oneof='ports', + oneof="ports", ) psc_connection_id: str = proto.Field( proto.STRING, @@ -540,15 +539,15 @@ class PscAutoConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -613,15 +612,15 @@ class PscConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -669,6 +668,7 @@ class PersistenceConfig(proto.Message): Optional. AOF configuration. This field will be ignored if mode is not AOF. """ + class PersistenceMode(proto.Enum): r"""Possible persistence modes. @@ -700,6 +700,7 @@ class RDBConfig(proto.Message): snapshots will be aligned. If not provided, the current time will be used. """ + class SnapshotPeriod(proto.Enum): r"""Possible snapshot periods. @@ -721,10 +722,10 @@ class SnapshotPeriod(proto.Enum): TWELVE_HOURS = 3 TWENTY_FOUR_HOURS = 4 - rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + rdb_snapshot_period: "PersistenceConfig.RDBConfig.SnapshotPeriod" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + enum="PersistenceConfig.RDBConfig.SnapshotPeriod", ) rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -739,6 +740,7 @@ class AOFConfig(proto.Message): append_fsync (google.cloud.memorystore_v1beta.types.PersistenceConfig.AOFConfig.AppendFsync): Optional. The fsync mode. """ + class AppendFsync(proto.Enum): r"""Possible fsync modes. @@ -762,10 +764,10 @@ class AppendFsync(proto.Enum): EVERY_SEC = 2 ALWAYS = 3 - append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + append_fsync: "PersistenceConfig.AOFConfig.AppendFsync" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.AOFConfig.AppendFsync', + enum="PersistenceConfig.AOFConfig.AppendFsync", ) mode: PersistenceMode = proto.Field( @@ -811,6 +813,7 @@ class ZoneDistributionConfig(proto.Message): Optional. Current zone distribution mode. Defaults to MULTI_ZONE. """ + class ZoneDistributionMode(proto.Enum): r"""Possible zone distribution modes. 
@@ -904,10 +907,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -986,10 +989,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1034,10 +1037,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1142,16 +1145,18 @@ class CertChain(proto.Message): number=1, ) - ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", ) managed_server_ca: ManagedCertificateAuthority = proto.Field( proto.MESSAGE, number=2, - oneof='server_ca', + oneof="server_ca", message=ManagedCertificateAuthority, ) name: str = proto.Field( diff --git a/owl-bot-staging/google-cloud-memorystore/v1/mypy.ini b/packages/google-cloud-memorystore/mypy.ini similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/mypy.ini rename to packages/google-cloud-memorystore/mypy.ini diff --git a/packages/google-cloud-memorystore/noxfile.py b/packages/google-cloud-memorystore/noxfile.py new file mode 100644 index 000000000000..a9ceef47133c --- /dev/null +++ b/packages/google-cloud-memorystore/noxfile.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
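Before the packaging and test scaffolding that follows (noxfile.py, decrypt-secrets.sh, setup.py, and the relocated generated samples), it helps to see how the v1beta request types above are exercised end to end. This is a hedged sketch, not part of the patch itself: the call pattern mirrors the generated memorystore_v1beta create-instance sample renamed later in this series, but the project, location, and instance values are placeholders and the empty Instance() would still need its required configuration filled in.

from google.cloud import memorystore_v1beta


def create_instance_sketch():
    # Assumes application-default credentials are available in the environment.
    client = memorystore_v1beta.MemorystoreClient()

    request = memorystore_v1beta.CreateInstanceRequest(
        parent="projects/my-project/locations/us-central1",  # placeholder
        instance_id="my-instance",  # placeholder
        instance=memorystore_v1beta.Instance(),  # fill in real instance config
    )

    # create_instance returns a long-running operation; result() blocks until
    # the service reports completion via OperationMetadata.
    operation = client.create_instance(request=request)
    instance = operation.result()
    print(instance.name)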
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py 
b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py 
b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json rename to packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json rename to packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json diff --git a/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh b/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." 
+ exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py rename to packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1beta_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py rename to packages/google-cloud-memorystore/scripts/fixup_memorystore_v1beta_keywords.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py b/packages/google-cloud-memorystore/setup.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1beta/setup.py rename to packages/google-cloud-memorystore/setup.py index 3cb03e5a17ca..f85d7df9f3c1 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py +++ b/packages/google-cloud-memorystore/setup.py @@ -17,20 +17,22 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-memorystore' +name = "google-cloud-memorystore" description = "Google Cloud Memorystore API client library" version = None -with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: +with open( + os.path.join(package_root, "google/cloud/memorystore/gapic_version.py") +) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -47,8 +49,7 @@ "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/google-cloud-memorystore/testing/.gitignore b/packages/google-cloud-memorystore/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-memorystore/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt b/packages/google-cloud-memorystore/testing/constraints-3.10.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt rename to packages/google-cloud-memorystore/testing/constraints-3.10.txt diff --git 
a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt b/packages/google-cloud-memorystore/testing/constraints-3.11.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt rename to packages/google-cloud-memorystore/testing/constraints-3.11.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt b/packages/google-cloud-memorystore/testing/constraints-3.12.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt rename to packages/google-cloud-memorystore/testing/constraints-3.12.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt b/packages/google-cloud-memorystore/testing/constraints-3.13.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt rename to packages/google-cloud-memorystore/testing/constraints-3.13.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt b/packages/google-cloud-memorystore/testing/constraints-3.7.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt rename to packages/google-cloud-memorystore/testing/constraints-3.7.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt b/packages/google-cloud-memorystore/testing/constraints-3.8.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt rename to packages/google-cloud-memorystore/testing/constraints-3.8.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt b/packages/google-cloud-memorystore/testing/constraints-3.9.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt rename to packages/google-cloud-memorystore/testing/constraints-3.9.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-memorystore/tests/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py rename to packages/google-cloud-memorystore/tests/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-memorystore/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py b/packages/google-cloud-memorystore/tests/unit/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py rename to packages/google-cloud-memorystore/tests/unit/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py +++ b/packages/google-cloud-memorystore/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py rename to packages/google-cloud-memorystore/tests/unit/gapic/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py +++ 
b/packages/google-cloud-memorystore/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py similarity index 64% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py index 75206a8bb252..52921e82fffd 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,51 +22,56 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.memorystore_v1.services.memorystore import 
MemorystoreClient -from google.cloud.memorystore_v1.services.memorystore import pagers -from google.cloud.memorystore_v1.services.memorystore import transports -from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth + +from google.cloud.memorystore_v1.services.memorystore import ( + MemorystoreClient, + pagers, + transports, +) +from google.cloud.memorystore_v1.types import memorystore async def mock_async_gen(data, chunk_size=1): @@ -73,9 +79,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -83,17 +91,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -104,12 +122,24 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert MemorystoreClient._get_default_mtls_endpoint(None) is None - assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + def test__read_environment_variables(): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -119,16 +149,25 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert MemorystoreClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MemorystoreClient._read_environment_variables() == (False, "always", None) + assert MemorystoreClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -136,65 +175,149 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + assert 
MemorystoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert MemorystoreClient._get_client_cert_source(None, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MemorystoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MemorystoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + MemorystoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + 
) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + assert ( + MemorystoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MemorystoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MemorystoreClient._get_universe_domain(None, None) + == MemorystoreClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: MemorystoreClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -202,48 +325,64 @@ def test_memorystore_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - 
else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) @@ -258,27 +397,34 @@ def test_memorystore_client_get_transport_class(): assert transport == transports.MemorystoreRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) -def test_memorystore_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) +def test_memorystore_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -296,13 +442,15 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -314,7 +462,7 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -334,23 +482,33 @@ def test_memorystore_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -359,43 +517,63 @@ def test_memorystore_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), -]) -@mock.patch.object(MemorystoreClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_memorystore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -414,12 +592,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -440,15 +628,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -458,18 +653,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient) +) def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -477,8 +676,12 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -496,16 +699,28 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -515,33 +730,55 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test_memorystore_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -564,11 +801,19 @@ def test_memorystore_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists 
else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -576,25 +821,34 @@ def test_memorystore_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -603,22 +857,28 @@ def test_memorystore_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), -]) -def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), + ], +) +def test_memorystore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +906,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -662,57 +924,69 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=memorystore.ListInstancesRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = memorystore.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -723,23 +997,33 @@ def test_list_instances_rest_required_fields(request_type=memorystore.ListInstan return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -749,16 +1033,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -768,7 +1052,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_instances(**mock_args) @@ -777,10 +1061,13 @@ def test_list_instances_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -791,20 +1078,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( memorystore.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( memorystore.ListInstancesResponse( @@ -813,17 +1100,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): memorystore.Instance(), memorystore.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), memorystore.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), memorystore.ListInstancesResponse( instances=[ memorystore.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), memorystore.ListInstancesResponse( instances=[ @@ -839,21 +1126,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, memorystore.Instance) - for i in results) + assert all(isinstance(i, memorystore.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -875,7 +1161,9 @@ def test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -898,48 +1186,51 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -950,23 +1241,23 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -976,16 +1267,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -995,7 +1288,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_instance(**mock_args) @@ -1004,10 +1297,13 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1018,7 +1314,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( memorystore.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1040,7 +1336,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -1060,7 +1358,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=memorystore.CreateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} @@ -1068,65 +1368,73 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -1136,17 +1444,33 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns "instanceId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -1156,18 +1480,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) mock_args.update(sample_request) @@ -1175,7 +1499,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_instance(**mock_args) @@ -1184,10 +1508,13 @@ def test_create_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1198,9 +1525,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( memorystore.CreateInstanceRequest(), - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) @@ -1222,7 +1549,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -1242,77 +1571,95 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=memorystore.UpdateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", )) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("instance",)) + ) def test_update_instance_rest_flattened(): @@ -1322,17 +1669,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -1340,7 +1689,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.update_instance(**mock_args) @@ -1349,10 +1698,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1363,8 +1716,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( memorystore.UpdateInstanceRequest(), - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1386,7 +1739,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -1406,57 +1761,62 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=memorystore.DeleteInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1464,23 +1824,23 @@ def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -1490,16 +1850,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1507,7 +1869,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_instance(**mock_args) @@ -1516,10 +1878,13 @@ def test_delete_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1530,7 +1895,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( memorystore.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1548,12 +1913,19 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_certificate_authority in client._transport._wrapped_methods + assert ( + client._transport.get_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_certificate_authority + ] = mock_rpc request = {} client.get_certificate_authority(request) @@ -1568,55 +1940,60 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_required_fields( + request_type=memorystore.GetCertificateAuthorityRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1627,23 +2004,23 @@ def test_get_certificate_authority_rest_required_fields(request_type=memorystore return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_certificate_authority_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_certificate_authority_rest_flattened(): @@ -1653,16 +2030,18 @@ def test_get_certificate_authority_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1672,7 +2051,7 @@ def test_get_certificate_authority_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_certificate_authority(**mock_args) @@ -1681,10 +2060,14 @@ def test_get_certificate_authority_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority" + % client.transport._host, + args[1], + ) -def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): +def test_get_certificate_authority_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1695,7 +2078,7 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_certificate_authority( memorystore.GetCertificateAuthorityRequest(), - name='name_value', + name="name_value", ) @@ -1737,8 +2120,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = MemorystoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -1761,16 +2143,20 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.MemorystoreRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MemorystoreRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_rest(): transport = MemorystoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -1780,18 +2166,19 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1799,26 +2186,28 @@ def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - memorystore.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -1828,31 +2217,40 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_instances" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + pb_message = memorystore.ListInstancesRequest.pb( + memorystore.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1862,18 +2260,26 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + return_value = memorystore.ListInstancesResponse.to_json( + memorystore.ListInstancesResponse() + ) req.return_value.content = return_value request = memorystore.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -1881,18 +2287,19 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1900,35 +2307,37 @@ def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance( - name='name_value', - state=memorystore.Instance.State.CREATING, - uid='uid_value', - replica_count=1384, - authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, - transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, - shard_count=1178, - node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, - engine_version='engine_version_value', - deletion_protection_enabled=True, - mode=memorystore.Instance.Mode.STANDALONE, + name="name_value", + state=memorystore.Instance.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version="engine_version_value", + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, ) # Wrap the value into a proper Response obj @@ -1938,21 +2347,27 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, memorystore.Instance) - assert response.name == 'name_value' + assert response.name == "name_value" assert response.state == memorystore.Instance.State.CREATING - assert response.uid == 'uid_value' + assert response.uid == "uid_value" assert response.replica_count == 1384 - assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED - assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert ( + response.authorization_mode + == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + ) + assert ( + response.transit_encryption_mode + == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + ) assert response.shard_count == 1178 assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO - assert response.engine_version == 'engine_version_value' + assert response.engine_version == "engine_version_value" assert response.deletion_protection_enabled is True assert response.mode == memorystore.Instance.Mode.STANDALONE @@ -1961,14 +2376,21 @@ def test_get_instance_rest_call_success(request_type): def test_get_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) @@ -1985,33 +2407,42 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.Instance() - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + request_type=memorystore.CreateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2019,19 +2450,82 @@ def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + 
"deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2051,7 +2545,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2065,7 +2559,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2080,12 +2574,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2098,15 +2596,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -2118,18 +2616,28 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_create_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + pb_message = memorystore.CreateInstanceRequest.pb( + memorystore.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2143,33 +2651,44 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=memorystore.UpdateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2177,19 +2696,84 @@ def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", 
+ "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2209,7 +2793,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2223,7 +2807,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2238,12 +2822,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2256,15 +2844,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) @@ -2276,18 +2864,28 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_update_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + pb_message = memorystore.UpdateInstanceRequest.pb( + memorystore.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2301,33 +2899,42 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=memorystore.DeleteInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2335,30 +2942,32 @@ def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) @@ -2370,18 +2979,28 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_delete_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + pb_message = memorystore.DeleteInstanceRequest.pb( + memorystore.DeleteInstanceRequest() + ) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2395,33 +3014,42 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_bad_request( + request_type=memorystore.GetCertificateAuthorityRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2429,25 +3057,27 @@ def test_get_certificate_authority_rest_bad_request(request_type=memorystore.Get client.get_certificate_authority(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetCertificateAuthorityRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetCertificateAuthorityRequest, + dict, + ], +) def test_get_certificate_authority_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = memorystore.CertificateAuthority( - name='name_value', + name="name_value", ) # Wrap the value into a proper Response obj @@ -2457,30 +3087,39 @@ def test_get_certificate_authority_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) # Establish that the response is the type that we expect. assert isinstance(response, memorystore.CertificateAuthority) - assert response.name == 'name_value' + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_certificate_authority_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + pb_message = memorystore.GetCertificateAuthorityRequest.pb( + memorystore.GetCertificateAuthorityRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2490,18 +3129,26 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + return_value = memorystore.CertificateAuthority.to_json( + memorystore.CertificateAuthority() + ) req.return_value.content = return_value request = memorystore.GetCertificateAuthorityRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() - client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_certificate_authority( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -2513,13 +3160,17 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2527,20 +3178,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.Location() @@ -2548,7 +3202,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2558,19 +3212,23 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2578,20 +3236,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -2599,7 +3260,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2609,19 +3270,25 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2629,28 +3296,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2660,19 +3330,25 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2680,28 +3356,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2711,19 +3390,25 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2731,20 +3416,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -2752,7 +3440,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2762,19 +3450,25 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2782,20 +3476,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -2803,7 +3500,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2812,10 +3509,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -2829,9 +3526,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -2851,9 +3546,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -2873,9 +3566,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -2895,9 +3586,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -2917,9 +3606,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -2940,8 +3627,8 @@ def test_get_certificate_authority_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_certificate_authority), - '__call__') as call: + type(client.transport.get_certificate_authority), "__call__" + ) as call: client.get_certificate_authority(request=None) # Establish that the underlying stub method was called. @@ -2962,7 +3649,7 @@ def test_memorystore_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. @@ -2974,13 +3661,15 @@ def test_memorystore_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_memorystore_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + with mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2989,18 +3678,18 @@ def test_memorystore_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'get_certificate_authority', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "get_certificate_authority", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3016,7 +3705,7 @@ def test_memorystore_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3025,25 +3714,30 @@ def test_memorystore_base_transport(): def test_memorystore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_memorystore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport() @@ -3052,61 +3746,75 @@ def test_memorystore_base_transport_with_adc(): def test_memorystore_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) MemorystoreClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_memorystore_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MemorystoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MemorystoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_no_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_with_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com:8000' + "memorystore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -3137,11 +3845,16 @@ def test_memorystore_client_transport_session_collision(transport_name): session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + def test_certificate_authority_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + expected = 
"projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.certificate_authority_path(project, location, instance) assert expected == actual @@ -3158,11 +3871,18 @@ def test_parse_certificate_authority_path(): actual = MemorystoreClient.parse_certificate_authority_path(path) assert expected == actual + def test_forwarding_rule_path(): project = "cuttlefish" region = "mussel" forwarding_rule = "winkle" - expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + expected = ( + "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) + ) actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) assert expected == actual @@ -3179,11 +3899,16 @@ def test_parse_forwarding_rule_path(): actual = MemorystoreClient.parse_forwarding_rule_path(path) assert expected == actual + def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.instance_path(project, location, instance) assert expected == actual @@ -3200,10 +3925,14 @@ def test_parse_instance_path(): actual = MemorystoreClient.parse_instance_path(path) assert expected == actual + def test_network_path(): project = "cuttlefish" network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = MemorystoreClient.network_path(project, network) assert expected == actual @@ -3219,12 +3948,19 @@ def test_parse_network_path(): actual = MemorystoreClient.parse_network_path(path) assert expected == actual + def test_service_attachment_path(): project = "scallop" region = "abalone" service_attachment = "squid" - expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) - actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = MemorystoreClient.service_attachment_path( + project, region, service_attachment + ) assert expected == actual @@ -3240,9 +3976,12 @@ def test_parse_service_attachment_path(): actual = MemorystoreClient.parse_service_attachment_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = MemorystoreClient.common_billing_account_path(billing_account) assert expected == actual @@ -3257,9 +3996,12 @@ def test_parse_common_billing_account_path(): actual = MemorystoreClient.parse_common_billing_account_path(path) assert expected 
== actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MemorystoreClient.common_folder_path(folder) assert expected == actual @@ -3274,9 +4016,12 @@ def test_parse_common_folder_path(): actual = MemorystoreClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MemorystoreClient.common_organization_path(organization) assert expected == actual @@ -3291,9 +4036,12 @@ def test_parse_common_organization_path(): actual = MemorystoreClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = MemorystoreClient.common_project_path(project) assert expected == actual @@ -3308,10 +4056,14 @@ def test_parse_common_project_path(): actual = MemorystoreClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = MemorystoreClient.common_location_path(project, location) assert expected == actual @@ -3331,14 +4083,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: transport_class = MemorystoreClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3349,10 +4105,11 @@ def test_client_with_default_client_info(): def test_transport_close_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -3360,12 +4117,11 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -3374,9 +4130,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (MemorystoreClient, transports.MemorystoreRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MemorystoreClient, transports.MemorystoreRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -3391,7 +4151,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py similarity index 64% rename from owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py index 25ba80cdffb7..c0873f072812 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,51 +22,56 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.memorystore_v1beta.services.memorystore import MemorystoreClient -from google.cloud.memorystore_v1beta.services.memorystore import pagers -from google.cloud.memorystore_v1beta.services.memorystore import transports -from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth + +from google.cloud.memorystore_v1beta.services.memorystore import ( + MemorystoreClient, + pagers, + transports, +) +from google.cloud.memorystore_v1beta.types import memorystore async def mock_async_gen(data, chunk_size=1): @@ -73,9 +79,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -83,17 +91,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -104,12 +122,24 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert MemorystoreClient._get_default_mtls_endpoint(None) is None - assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + def test__read_environment_variables(): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -119,16 +149,25 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert MemorystoreClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MemorystoreClient._read_environment_variables() == (False, "always", None) + assert MemorystoreClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -136,65 +175,149 @@ def test__read_environment_variables(): with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + assert MemorystoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert MemorystoreClient._get_client_cert_source(None, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MemorystoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MemorystoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == 
MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + MemorystoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + assert ( + MemorystoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MemorystoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MemorystoreClient._get_universe_domain(None, None) + == MemorystoreClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: MemorystoreClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
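
The generated tests above exercise the client's endpoint and universe-domain resolution helpers (_get_api_endpoint, _get_universe_domain). As a rough illustration of the precedence rules those assertions encode, here is a standalone sketch; the constant values (the mTLS hostname in particular), the function names, and the use of ValueError in place of the MutualTLSChannelError raised by the real client are assumptions for illustration, not the library's internal implementation. The plain service host memorystore.googleapis.com is taken from the assertions later in this file.

# Illustrative sketch only -- not the generated client's internal code.
_DEFAULT_UNIVERSE = "googleapis.com"                           # assumed default universe
_DEFAULT_ENDPOINT_TEMPLATE = "memorystore.{UNIVERSE_DOMAIN}"   # assumed endpoint template
_DEFAULT_MTLS_ENDPOINT = "memorystore.mtls.googleapis.com"     # assumed mTLS endpoint


def resolve_universe_domain(client_universe_domain, universe_domain_env):
    """Client option beats the GOOGLE_CLOUD_UNIVERSE_DOMAIN env var, which beats the default."""
    if client_universe_domain == "":
        raise ValueError("Universe Domain cannot be an empty string.")
    return client_universe_domain or universe_domain_env or _DEFAULT_UNIVERSE


def resolve_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
    """Pick the endpoint the way the assertions above expect.

    ValueError stands in here for the MutualTLSChannelError the real client raises.
    """
    if api_override is not None:
        return api_override
    use_mtls = use_mtls_endpoint == "always" or (
        use_mtls_endpoint == "auto" and client_cert_source is not None
    )
    if use_mtls:
        if universe_domain != _DEFAULT_UNIVERSE:
            raise ValueError(
                "mTLS is not supported in any universe other than googleapis.com."
            )
        return _DEFAULT_MTLS_ENDPOINT
    return _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)


if __name__ == "__main__":
    # No override, no cert, default universe, "auto": plain endpoint.
    assert resolve_api_endpoint(None, None, "googleapis.com", "auto") == "memorystore.googleapis.com"
    # "always" forces the mTLS endpoint inside the default universe.
    assert resolve_api_endpoint(None, None, "googleapis.com", "always") == "memorystore.mtls.googleapis.com"
    # An explicit api_endpoint override wins regardless of the other inputs.
    assert resolve_api_endpoint("foo.com", object(), "googleapis.com", "always") == "foo.com"
    # A client-level universe domain beats the environment variable.
    assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"

The precedence mirrors the assertions: an explicit api_endpoint override always wins, the mTLS endpoint applies only when requested (or auto-detected via a client certificate) and only in the default googleapis.com universe, and otherwise the endpoint template is filled with whichever universe domain was resolved.
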
-@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -202,48 +325,64 @@ def test_memorystore_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - 
else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) @@ -258,27 +397,34 @@ def test_memorystore_client_get_transport_class(): assert transport == transports.MemorystoreRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) -def test_memorystore_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) +def test_memorystore_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -296,13 +442,15 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -314,7 +462,7 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -334,23 +482,33 @@ def test_memorystore_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -359,43 +517,63 @@ def test_memorystore_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), -]) -@mock.patch.object(MemorystoreClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_memorystore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -414,12 +592,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -440,15 +628,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -458,18 +653,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient) +) def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -477,8 +676,12 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -496,16 +699,28 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -515,33 +730,55 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test_memorystore_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -564,11 +801,19 @@ def test_memorystore_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists 
else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -576,25 +821,34 @@ def test_memorystore_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -603,22 +857,28 @@ def test_memorystore_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), -]) -def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), + ], +) +def test_memorystore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +906,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -662,57 +924,69 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=memorystore.ListInstancesRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = memorystore.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -723,23 +997,33 @@ def test_list_instances_rest_required_fields(request_type=memorystore.ListInstan return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -749,16 +1033,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -768,7 +1052,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_instances(**mock_args) @@ -777,10 +1061,14 @@ def test_list_instances_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -791,20 +1079,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( memorystore.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( memorystore.ListInstancesResponse( @@ -813,17 +1101,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): memorystore.Instance(), memorystore.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), memorystore.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), memorystore.ListInstancesResponse( instances=[ memorystore.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), memorystore.ListInstancesResponse( instances=[ @@ -839,21 +1127,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, memorystore.Instance) - for i in results) + assert all(isinstance(i, memorystore.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -875,7 +1162,9 @@ def test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -898,48 +1187,51 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -950,23 +1242,23 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -976,16 +1268,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -995,7 +1289,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_instance(**mock_args) @@ -1004,10 +1298,14 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1018,7 +1316,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( memorystore.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1040,7 +1338,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -1060,7 +1360,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=memorystore.CreateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} @@ -1068,65 +1370,73 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -1136,17 +1446,33 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns "instanceId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -1156,18 +1482,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) mock_args.update(sample_request) @@ -1175,7 +1501,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_instance(**mock_args) @@ -1184,10 +1510,14 @@ def test_create_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1198,9 +1528,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( memorystore.CreateInstanceRequest(), - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) @@ -1222,7 +1552,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -1242,77 +1574,95 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=memorystore.UpdateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", )) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("instance",)) + ) def test_update_instance_rest_flattened(): @@ -1322,17 +1672,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -1340,7 +1692,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.update_instance(**mock_args) @@ -1349,10 +1701,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1363,8 +1719,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( memorystore.UpdateInstanceRequest(), - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1386,7 +1742,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -1406,57 +1764,62 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=memorystore.DeleteInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1464,23 +1827,23 @@ def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -1490,16 +1853,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1507,7 +1872,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_instance(**mock_args) @@ -1516,10 +1881,14 @@ def test_delete_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1530,7 +1899,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( memorystore.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1548,12 +1917,19 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_certificate_authority in client._transport._wrapped_methods + assert ( + client._transport.get_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_certificate_authority + ] = mock_rpc request = {} client.get_certificate_authority(request) @@ -1568,55 +1944,60 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_required_fields( + request_type=memorystore.GetCertificateAuthorityRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1627,23 +2008,23 @@ def test_get_certificate_authority_rest_required_fields(request_type=memorystore return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_certificate_authority_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_certificate_authority_rest_flattened(): @@ -1653,16 +2034,18 @@ def test_get_certificate_authority_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1672,7 +2055,7 @@ def test_get_certificate_authority_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_certificate_authority(**mock_args) @@ -1681,10 +2064,14 @@ def test_get_certificate_authority_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority" + % client.transport._host, + args[1], + ) -def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): +def test_get_certificate_authority_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1695,7 +2082,7 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_certificate_authority( memorystore.GetCertificateAuthorityRequest(), - name='name_value', + name="name_value", ) @@ -1737,8 +2124,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = MemorystoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -1761,16 +2147,20 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.MemorystoreRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MemorystoreRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_rest(): transport = MemorystoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -1780,18 +2170,19 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1799,26 +2190,28 @@ def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - memorystore.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -1828,31 +2221,40 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_instances" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + pb_message = memorystore.ListInstancesRequest.pb( + memorystore.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1862,18 +2264,26 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + return_value = memorystore.ListInstancesResponse.to_json( + memorystore.ListInstancesResponse() + ) req.return_value.content = return_value request = memorystore.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -1881,18 +2291,19 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1900,35 +2311,37 @@ def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance( - name='name_value', - state=memorystore.Instance.State.CREATING, - uid='uid_value', - replica_count=1384, - authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, - transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, - shard_count=1178, - node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, - engine_version='engine_version_value', - deletion_protection_enabled=True, - mode=memorystore.Instance.Mode.STANDALONE, + name="name_value", + state=memorystore.Instance.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version="engine_version_value", + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, ) # Wrap the value into a proper Response obj @@ -1938,21 +2351,27 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, memorystore.Instance) - assert response.name == 'name_value' + assert response.name == "name_value" assert response.state == memorystore.Instance.State.CREATING - assert response.uid == 'uid_value' + assert response.uid == "uid_value" assert response.replica_count == 1384 - assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED - assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert ( + response.authorization_mode + == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + ) + assert ( + response.transit_encryption_mode + == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + ) assert response.shard_count == 1178 assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO - assert response.engine_version == 'engine_version_value' + assert response.engine_version == "engine_version_value" assert response.deletion_protection_enabled is True assert response.mode == memorystore.Instance.Mode.STANDALONE @@ -1961,14 +2380,21 @@ def test_get_instance_rest_call_success(request_type): def test_get_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) @@ -1985,33 +2411,42 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.Instance() - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + request_type=memorystore.CreateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2019,19 +2454,82 @@ def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + 
"deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2051,7 +2549,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2065,7 +2563,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2080,12 +2578,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2098,15 +2600,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -2118,18 +2620,28 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_create_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + pb_message = memorystore.CreateInstanceRequest.pb( + memorystore.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2143,33 +2655,44 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=memorystore.UpdateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2177,19 +2700,84 @@ def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", 
+ "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2209,7 +2797,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2223,7 +2811,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2238,12 +2826,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2256,15 +2848,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) @@ -2276,18 +2868,28 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_update_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + pb_message = memorystore.UpdateInstanceRequest.pb( + memorystore.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2301,33 +2903,42 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=memorystore.DeleteInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2335,30 +2946,32 @@ def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) @@ -2370,18 +2983,28 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_delete_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + pb_message = memorystore.DeleteInstanceRequest.pb( + memorystore.DeleteInstanceRequest() + ) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2395,33 +3018,42 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_bad_request( + request_type=memorystore.GetCertificateAuthorityRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2429,25 +3061,27 @@ def test_get_certificate_authority_rest_bad_request(request_type=memorystore.Get client.get_certificate_authority(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetCertificateAuthorityRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetCertificateAuthorityRequest, + dict, + ], +) def test_get_certificate_authority_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = memorystore.CertificateAuthority( - name='name_value', + name="name_value", ) # Wrap the value into a proper Response obj @@ -2457,30 +3091,39 @@ def test_get_certificate_authority_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) # Establish that the response is the type that we expect. assert isinstance(response, memorystore.CertificateAuthority) - assert response.name == 'name_value' + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_certificate_authority_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + pb_message = memorystore.GetCertificateAuthorityRequest.pb( + memorystore.GetCertificateAuthorityRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2490,18 +3133,26 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + return_value = memorystore.CertificateAuthority.to_json( + memorystore.CertificateAuthority() + ) req.return_value.content = return_value request = memorystore.GetCertificateAuthorityRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() - client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_certificate_authority( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -2513,13 +3164,17 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2527,20 +3182,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.Location() @@ -2548,7 +3206,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2558,19 +3216,23 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2578,20 +3240,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -2599,7 +3264,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2609,19 +3274,25 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2629,28 +3300,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2660,19 +3334,25 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2680,28 +3360,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2711,19 +3394,25 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2731,20 +3420,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -2752,7 +3444,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2762,19 +3454,25 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2782,20 +3480,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -2803,7 +3504,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2812,10 +3513,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -2829,9 +3530,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -2851,9 +3550,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -2873,9 +3570,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -2895,9 +3590,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -2917,9 +3610,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -2940,8 +3631,8 @@ def test_get_certificate_authority_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_certificate_authority), - '__call__') as call: + type(client.transport.get_certificate_authority), "__call__" + ) as call: client.get_certificate_authority(request=None) # Establish that the underlying stub method was called. @@ -2962,7 +3653,7 @@ def test_memorystore_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. @@ -2974,13 +3665,15 @@ def test_memorystore_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_memorystore_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + with mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2989,18 +3682,18 @@ def test_memorystore_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'get_certificate_authority', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "get_certificate_authority", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3016,7 +3709,7 @@ def test_memorystore_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3025,25 +3718,30 @@ def test_memorystore_base_transport(): def test_memorystore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_memorystore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport() @@ -3052,61 +3750,75 @@ def test_memorystore_base_transport_with_adc(): def test_memorystore_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) MemorystoreClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_memorystore_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MemorystoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MemorystoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_no_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_with_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com:8000' + "memorystore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -3137,11 +3849,16 @@ def test_memorystore_client_transport_session_collision(transport_name): session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + def test_certificate_authority_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + expected = 
"projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.certificate_authority_path(project, location, instance) assert expected == actual @@ -3158,11 +3875,18 @@ def test_parse_certificate_authority_path(): actual = MemorystoreClient.parse_certificate_authority_path(path) assert expected == actual + def test_forwarding_rule_path(): project = "cuttlefish" region = "mussel" forwarding_rule = "winkle" - expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + expected = ( + "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) + ) actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) assert expected == actual @@ -3179,11 +3903,16 @@ def test_parse_forwarding_rule_path(): actual = MemorystoreClient.parse_forwarding_rule_path(path) assert expected == actual + def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.instance_path(project, location, instance) assert expected == actual @@ -3200,10 +3929,14 @@ def test_parse_instance_path(): actual = MemorystoreClient.parse_instance_path(path) assert expected == actual + def test_network_path(): project = "cuttlefish" network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = MemorystoreClient.network_path(project, network) assert expected == actual @@ -3219,12 +3952,19 @@ def test_parse_network_path(): actual = MemorystoreClient.parse_network_path(path) assert expected == actual + def test_service_attachment_path(): project = "scallop" region = "abalone" service_attachment = "squid" - expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) - actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = MemorystoreClient.service_attachment_path( + project, region, service_attachment + ) assert expected == actual @@ -3240,9 +3980,12 @@ def test_parse_service_attachment_path(): actual = MemorystoreClient.parse_service_attachment_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = MemorystoreClient.common_billing_account_path(billing_account) assert expected == actual @@ -3257,9 +4000,12 @@ def test_parse_common_billing_account_path(): actual = MemorystoreClient.parse_common_billing_account_path(path) assert expected 
== actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MemorystoreClient.common_folder_path(folder) assert expected == actual @@ -3274,9 +4020,12 @@ def test_parse_common_folder_path(): actual = MemorystoreClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MemorystoreClient.common_organization_path(organization) assert expected == actual @@ -3291,9 +4040,12 @@ def test_parse_common_organization_path(): actual = MemorystoreClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = MemorystoreClient.common_project_path(project) assert expected == actual @@ -3308,10 +4060,14 @@ def test_parse_common_project_path(): actual = MemorystoreClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = MemorystoreClient.common_location_path(project, location) assert expected == actual @@ -3331,14 +4087,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: transport_class = MemorystoreClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3349,10 +4109,11 @@ def test_client_with_default_client_info(): def test_transport_close_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -3360,12 +4121,11 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -3374,9 +4134,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (MemorystoreClient, transports.MemorystoreRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MemorystoreClient, transports.MemorystoreRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -3391,7 +4155,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From 51f38ecf419bb161b9377bcc33b9cf19c6b213c9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 26 Nov 2024 12:45:30 +0000 Subject: [PATCH 5/6] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-memorystore/v1/MANIFEST.in | 2 - .../google-cloud-memorystore/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../v1/docs/index.rst | 7 - .../v1/google/cloud/memorystore/__init__.py | 61 - .../google/cloud/memorystore_v1/__init__.py | 62 - .../google-cloud-memorystore/v1/noxfile.py | 280 --- .../google-cloud-memorystore/v1/setup.py | 98 - .../v1beta/.coveragerc | 13 - .../v1beta/MANIFEST.in | 2 - .../v1beta/README.rst | 49 - .../v1beta/docs/_static/custom.css | 3 - .../v1beta/docs/conf.py | 376 --- .../v1beta/docs/index.rst | 7 - .../google/cloud/memorystore/__init__.py | 61 - .../cloud/memorystore_v1beta/__init__.py | 62 - .../cloud/memorystore_v1beta/gapic_version.py | 16 - .../google/cloud/memorystore_v1beta/py.typed | 2 - .../google-cloud-memorystore/v1beta/mypy.ini | 3 - .../v1beta/noxfile.py | 280 --- .../v1beta/testing/constraints-3.10.txt | 6 - .../v1beta/testing/constraints-3.11.txt | 6 - .../v1beta/testing/constraints-3.12.txt | 6 - .../v1beta/testing/constraints-3.13.txt | 6 - .../v1beta/testing/constraints-3.7.txt | 10 - .../v1beta/testing/constraints-3.8.txt | 6 - .../v1beta/testing/constraints-3.9.txt | 6 - .../v1beta/tests/__init__.py | 16 - .../v1beta/tests/unit/__init__.py | 16 - .../v1beta/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/memorystore_v1beta/__init__.py | 16 - .../google-cloud-memorystore}/.coveragerc | 0 .../google-cloud-memorystore}/.flake8 | 4 +- packages/google-cloud-memorystore/.gitignore | 63 + .../google-cloud-memorystore/CHANGELOG.md | 1 + .../CODE_OF_CONDUCT.md | 95 + .../google-cloud-memorystore/CONTRIBUTING.rst | 273 ++ packages/google-cloud-memorystore/LICENSE | 202 ++ .../google-cloud-memorystore/MANIFEST.in | 24 +- packages/google-cloud-memorystore/README.rst | 108 + .../docs/CHANGELOG.md | 1 + .../google-cloud-memorystore/docs/README.rst | 1 + .../docs/_static/custom.css | 20 + .../docs/_templates/layout.html | 50 + .../google-cloud-memorystore}/docs/conf.py | 72 +- .../google-cloud-memorystore/docs/index.rst | 39 + .../docs/memorystore_v1/memorystore.rst | 0 .../docs/memorystore_v1/services_.rst | 0 .../docs/memorystore_v1/types_.rst | 0 
.../docs/memorystore_v1beta/memorystore.rst | 0 .../docs/memorystore_v1beta/services_.rst | 0 .../docs/memorystore_v1beta/types_.rst | 0 .../docs/multiprocessing.rst | 7 + .../docs/summary_overview.md | 22 + .../google/cloud/memorystore/__init__.py | 63 + .../google/cloud/memorystore/gapic_version.py | 0 .../google/cloud/memorystore/py.typed | 0 .../google/cloud/memorystore_v1/__init__.py | 63 + .../cloud/memorystore_v1/gapic_metadata.json | 0 .../cloud/memorystore_v1/gapic_version.py | 0 .../google/cloud/memorystore_v1/py.typed | 0 .../cloud/memorystore_v1/services/__init__.py | 0 .../services/memorystore/__init__.py | 4 +- .../services/memorystore/client.py | 561 +++-- .../services/memorystore/pagers.py | 46 +- .../memorystore/transports/README.rst | 0 .../memorystore/transports/__init__.py | 12 +- .../services/memorystore/transports/base.py | 192 +- .../services/memorystore/transports/rest.py | 916 ++++--- .../memorystore/transports/rest_base.py | 344 +-- .../cloud/memorystore_v1/types/__init__.py | 40 +- .../cloud/memorystore_v1/types/memorystore.py | 135 +- .../cloud/memorystore_v1beta/__init__.py | 63 + .../memorystore_v1beta/gapic_metadata.json | 0 .../memorystore_v1beta}/gapic_version.py | 0 .../google/cloud/memorystore_v1beta}/py.typed | 0 .../memorystore_v1beta/services/__init__.py | 0 .../services/memorystore/__init__.py | 4 +- .../services/memorystore/client.py | 561 +++-- .../services/memorystore/pagers.py | 46 +- .../memorystore/transports/README.rst | 0 .../memorystore/transports/__init__.py | 12 +- .../services/memorystore/transports/base.py | 192 +- .../services/memorystore/transports/rest.py | 916 ++++--- .../memorystore/transports/rest_base.py | 344 +-- .../memorystore_v1beta/types/__init__.py | 40 +- .../memorystore_v1beta/types/memorystore.py | 135 +- .../google-cloud-memorystore}/mypy.ini | 0 packages/google-cloud-memorystore/noxfile.py | 460 ++++ ...erated_memorystore_create_instance_sync.py | 0 ...erated_memorystore_delete_instance_sync.py | 0 ...orystore_get_certificate_authority_sync.py | 0 ...generated_memorystore_get_instance_sync.py | 0 ...nerated_memorystore_list_instances_sync.py | 0 ...erated_memorystore_update_instance_sync.py | 0 ...erated_memorystore_create_instance_sync.py | 0 ...erated_memorystore_delete_instance_sync.py | 0 ...orystore_get_certificate_authority_sync.py | 0 ...generated_memorystore_get_instance_sync.py | 0 ...nerated_memorystore_list_instances_sync.py | 0 ...erated_memorystore_update_instance_sync.py | 0 ..._metadata_google.cloud.memorystore.v1.json | 0 ...adata_google.cloud.memorystore.v1beta.json | 0 .../scripts/decrypt-secrets.sh | 46 + .../scripts/fixup_memorystore_v1_keywords.py | 0 .../fixup_memorystore_v1beta_keywords.py | 0 .../google-cloud-memorystore}/setup.py | 13 +- .../testing/.gitignore | 3 + .../testing/constraints-3.10.txt | 0 .../testing/constraints-3.11.txt | 0 .../testing/constraints-3.12.txt | 0 .../testing/constraints-3.13.txt | 0 .../testing/constraints-3.7.txt | 0 .../testing/constraints-3.8.txt | 0 .../testing/constraints-3.9.txt | 0 .../tests}/__init__.py | 1 - .../tests/unit/__init__.py | 1 - .../tests/unit/gapic}/__init__.py | 1 - .../unit/gapic/memorystore_v1/__init__.py | 1 - .../gapic/memorystore_v1/test_memorystore.py | 2196 ++++++++++------ .../unit/gapic/memorystore_v1beta/__init__.py | 15 + .../memorystore_v1beta/test_memorystore.py | 2200 +++++++++++------ 122 files changed, 7404 insertions(+), 4750 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in delete mode 
100644 owl-bot-staging/google-cloud-memorystore/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1/setup.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/README.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/.coveragerc (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/.flake8 (92%) create mode 100644 packages/google-cloud-memorystore/.gitignore create mode 100644 packages/google-cloud-memorystore/CHANGELOG.md create mode 100644 packages/google-cloud-memorystore/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-memorystore/CONTRIBUTING.rst create mode 100644 packages/google-cloud-memorystore/LICENSE rename owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 => packages/google-cloud-memorystore/MANIFEST.in (69%) create mode 100644 packages/google-cloud-memorystore/README.rst create mode 120000 packages/google-cloud-memorystore/docs/CHANGELOG.md create mode 120000 
packages/google-cloud-memorystore/docs/README.rst create mode 100644 packages/google-cloud-memorystore/docs/_static/custom.css create mode 100644 packages/google-cloud-memorystore/docs/_templates/layout.html rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/conf.py (88%) create mode 100644 packages/google-cloud-memorystore/docs/index.rst rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/memorystore.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/services_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/docs/memorystore_v1/types_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/memorystore.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/services_.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/docs/memorystore_v1beta/types_.rst (100%) create mode 100644 packages/google-cloud-memorystore/docs/multiprocessing.rst create mode 100644 packages/google-cloud-memorystore/docs/summary_overview.md create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore/py.typed (100%) create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/py.typed (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta => packages/google-cloud-memorystore/google/cloud/memorystore_v1}/services/memorystore/__init__.py (94%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/client.py (81%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/pagers.py (78%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/README.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py (77%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/base.py (70%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/rest.py (67%) rename 
{owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py (59%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/types/__init__.py (69%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/google/cloud/memorystore_v1/types/memorystore.py (93%) create mode 100644 packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/gapic_metadata.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/gapic_version.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/py.typed (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/__init__.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1 => packages/google-cloud-memorystore/google/cloud/memorystore_v1beta}/services/memorystore/__init__.py (94%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/client.py (81%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/pagers.py (78%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py (77%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py (70%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py (67%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py (59%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/types/__init__.py (69%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/google/cloud/memorystore_v1beta/types/memorystore.py (93%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/mypy.ini (100%) create mode 100644 packages/google-cloud-memorystore/noxfile.py rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => 
packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json (100%) create mode 100755 packages/google-cloud-memorystore/scripts/decrypt-secrets.sh rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/scripts/fixup_memorystore_v1_keywords.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/scripts/fixup_memorystore_v1beta_keywords.py (100%) rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/setup.py (93%) create mode 100644 packages/google-cloud-memorystore/testing/.gitignore rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.10.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.11.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.12.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.13.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.7.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/testing/constraints-3.8.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1 => 
packages/google-cloud-memorystore}/testing/constraints-3.9.txt (100%) rename {owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic => packages/google-cloud-memorystore/tests}/__init__.py (99%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/__init__.py (99%) rename {owl-bot-staging/google-cloud-memorystore/v1/tests => packages/google-cloud-memorystore/tests/unit/gapic}/__init__.py (99%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1/__init__.py (99%) rename {owl-bot-staging/google-cloud-memorystore/v1 => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1/test_memorystore.py (64%) create mode 100644 packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py rename {owl-bot-staging/google-cloud-memorystore/v1beta => packages/google-cloud-memorystore}/tests/unit/gapic/memorystore_v1beta/test_memorystore.py (64%) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in deleted file mode 100644 index cb2b6f08702d..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/memorystore *.py -recursive-include google/cloud/memorystore_v1 *.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/README.rst b/owl-bot-staging/google-cloud-memorystore/v1/README.rst deleted file mode 100644 index 6f935a43af2b..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Memorystore API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Memorystore API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst deleted file mode 100644 index bd30847b6cc3..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - memorystore_v1/services_ - memorystore_v1/types_ diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py deleted file mode 100644 index 6af94ce66397..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.memorystore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient - -from google.cloud.memorystore_v1.types.memorystore import CertificateAuthority -from google.cloud.memorystore_v1.types.memorystore import CreateInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import DeleteInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import DiscoveryEndpoint -from google.cloud.memorystore_v1.types.memorystore import GetCertificateAuthorityRequest -from google.cloud.memorystore_v1.types.memorystore import GetInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import Instance -from google.cloud.memorystore_v1.types.memorystore import ListInstancesRequest -from google.cloud.memorystore_v1.types.memorystore import ListInstancesResponse -from google.cloud.memorystore_v1.types.memorystore import NodeConfig -from google.cloud.memorystore_v1.types.memorystore import OperationMetadata -from google.cloud.memorystore_v1.types.memorystore import PersistenceConfig -from google.cloud.memorystore_v1.types.memorystore import PscAutoConnection -from google.cloud.memorystore_v1.types.memorystore import PscConnection -from google.cloud.memorystore_v1.types.memorystore import UpdateInstanceRequest -from google.cloud.memorystore_v1.types.memorystore import ZoneDistributionConfig -from google.cloud.memorystore_v1.types.memorystore import ConnectionType -from google.cloud.memorystore_v1.types.memorystore import PscConnectionStatus - -__all__ = ('MemorystoreClient', - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py deleted file mode 100644 index 5ad10e820927..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.memorystore_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.memorystore import MemorystoreClient - -from .types.memorystore import CertificateAuthority -from .types.memorystore import CreateInstanceRequest -from .types.memorystore import DeleteInstanceRequest -from .types.memorystore import DiscoveryEndpoint -from .types.memorystore import GetCertificateAuthorityRequest -from .types.memorystore import GetInstanceRequest -from .types.memorystore import Instance -from .types.memorystore import ListInstancesRequest -from .types.memorystore import ListInstancesResponse -from .types.memorystore import NodeConfig -from .types.memorystore import OperationMetadata -from .types.memorystore import PersistenceConfig -from .types.memorystore import PscAutoConnection -from .types.memorystore import PscConnection -from .types.memorystore import UpdateInstanceRequest -from .types.memorystore import ZoneDistributionConfig -from .types.memorystore import ConnectionType -from .types.memorystore import PscConnectionStatus - -__all__ = ( -'CertificateAuthority', -'ConnectionType', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'DiscoveryEndpoint', -'GetCertificateAuthorityRequest', -'GetInstanceRequest', -'Instance', -'ListInstancesRequest', -'ListInstancesResponse', -'MemorystoreClient', -'NodeConfig', -'OperationMetadata', -'PersistenceConfig', -'PscAutoConnection', -'PscConnection', -'PscConnectionStatus', -'UpdateInstanceRequest', -'ZoneDistributionConfig', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py deleted file mode 100644 index f0629be85ae7..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-memorystore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/setup.py b/owl-bot-staging/google-cloud-memorystore/v1/setup.py deleted file mode 100644 index 3cb03e5a17ca..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-memorystore' - - -description = "Google Cloud Memorystore API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - 
"Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc b/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc deleted file mode 100644 index 90ec0ce4fe89..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/memorystore/__init__.py - google/cloud/memorystore/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in b/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in deleted file mode 100644 index fa2894ae07c0..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/memorystore *.py -recursive-include google/cloud/memorystore_v1beta *.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst deleted file mode 100644 index 6f935a43af2b..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Memorystore API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Memorystore API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py deleted file mode 100644 index 8d134830ab88..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-memorystore documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. 
-project = u"google-cloud-memorystore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-memorystore-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - ( - root_doc, - "google-cloud-memorystore.tex", - u"google-cloud-memorystore Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-memorystore", - u"Google Cloud Memorystore Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-memorystore", - u"google-cloud-memorystore Documentation", - author, - "google-cloud-memorystore", - "GAPIC library for Google Cloud Memorystore API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst b/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst deleted file mode 100644 index 6c5c2af1be0f..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - memorystore_v1beta/services_ - memorystore_v1beta/types_ diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py deleted file mode 100644 index df718a2f7777..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memorystore import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.memorystore_v1beta.services.memorystore.client import MemorystoreClient - -from google.cloud.memorystore_v1beta.types.memorystore import CertificateAuthority -from google.cloud.memorystore_v1beta.types.memorystore import CreateInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import DeleteInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import DiscoveryEndpoint -from google.cloud.memorystore_v1beta.types.memorystore import GetCertificateAuthorityRequest -from google.cloud.memorystore_v1beta.types.memorystore import GetInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import Instance -from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesRequest -from google.cloud.memorystore_v1beta.types.memorystore import ListInstancesResponse -from google.cloud.memorystore_v1beta.types.memorystore import NodeConfig -from google.cloud.memorystore_v1beta.types.memorystore import OperationMetadata -from google.cloud.memorystore_v1beta.types.memorystore import PersistenceConfig -from google.cloud.memorystore_v1beta.types.memorystore import PscAutoConnection -from google.cloud.memorystore_v1beta.types.memorystore import PscConnection -from google.cloud.memorystore_v1beta.types.memorystore import UpdateInstanceRequest -from google.cloud.memorystore_v1beta.types.memorystore import ZoneDistributionConfig -from google.cloud.memorystore_v1beta.types.memorystore import ConnectionType -from google.cloud.memorystore_v1beta.types.memorystore import PscConnectionStatus - -__all__ = ('MemorystoreClient', - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py deleted file mode 100644 index 72d6431d3df2..000000000000 --- 
a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.memorystore_v1beta import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.memorystore import MemorystoreClient - -from .types.memorystore import CertificateAuthority -from .types.memorystore import CreateInstanceRequest -from .types.memorystore import DeleteInstanceRequest -from .types.memorystore import DiscoveryEndpoint -from .types.memorystore import GetCertificateAuthorityRequest -from .types.memorystore import GetInstanceRequest -from .types.memorystore import Instance -from .types.memorystore import ListInstancesRequest -from .types.memorystore import ListInstancesResponse -from .types.memorystore import NodeConfig -from .types.memorystore import OperationMetadata -from .types.memorystore import PersistenceConfig -from .types.memorystore import PscAutoConnection -from .types.memorystore import PscConnection -from .types.memorystore import UpdateInstanceRequest -from .types.memorystore import ZoneDistributionConfig -from .types.memorystore import ConnectionType -from .types.memorystore import PscConnectionStatus - -__all__ = ( -'CertificateAuthority', -'ConnectionType', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'DiscoveryEndpoint', -'GetCertificateAuthorityRequest', -'GetInstanceRequest', -'Instance', -'ListInstancesRequest', -'ListInstancesResponse', -'MemorystoreClient', -'NodeConfig', -'OperationMetadata', -'PersistenceConfig', -'PscAutoConnection', -'PscConnection', -'PscConnectionStatus', -'UpdateInstanceRequest', -'ZoneDistributionConfig', -) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed b/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed deleted file mode 100644 index 3e10cbb3572e..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-memorystore package uses inline types. diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini b/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py b/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py deleted file mode 100644 index 9d2f5c2af33c..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-memorystore' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1beta/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/memorystore_v1beta/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py b/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.coveragerc b/packages/google-cloud-memorystore/.coveragerc similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/.coveragerc rename to packages/google-cloud-memorystore/.coveragerc diff --git a/owl-bot-staging/google-cloud-memorystore/v1/.flake8 b/packages/google-cloud-memorystore/.flake8 similarity index 92% rename from owl-bot-staging/google-cloud-memorystore/v1/.flake8 rename to packages/google-cloud-memorystore/.flake8 index 29227d4cf419..32986c79287a 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/.flake8 +++ b/packages/google-cloud-memorystore/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-memorystore/.gitignore b/packages/google-cloud-memorystore/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-memorystore/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-memorystore/CHANGELOG.md b/packages/google-cloud-memorystore/CHANGELOG.md new file mode 100644 index 000000000000..5ddad421e08f --- /dev/null +++ b/packages/google-cloud-memorystore/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog \ No newline at end of file diff --git a/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md b/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-memorystore/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-memorystore/CONTRIBUTING.rst b/packages/google-cloud-memorystore/CONTRIBUTING.rst new file mode 100644 index 000000000000..c51aa4cb16ff --- /dev/null +++ b/packages/google-cloud-memorystore/CONTRIBUTING.rst @@ -0,0 +1,273 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.13 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. 
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-memorystore + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-memorystore/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-memorystore/LICENSE b/packages/google-cloud-memorystore/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-memorystore/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
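For orientation before the packaging and client files that follow, here is a minimal usage sketch of the client this commit introduces. It is a sketch only: it assumes Application Default Credentials, an existing project and location, and the standard GAPIC method surface inferred from the request types exported later in this patch (``ListInstancesRequest`` and friends); ``my-project`` and ``us-central1`` are placeholder values, not part of this change.

.. code-block:: python

    # Hypothetical example, not part of the generated package.
    from google.cloud import memorystore_v1


    def show_instances(project_id: str, location: str) -> None:
        # Uses Application Default Credentials by default.
        client = memorystore_v1.MemorystoreClient()

        # common_location_path() is one of the resource-name helpers defined on
        # the generated client; it returns "projects/{project}/locations/{location}".
        parent = client.common_location_path(project_id, location)

        # list_instances() is assumed to follow the usual GAPIC pattern of
        # returning a pager that lazily fetches additional pages.
        for instance in client.list_instances(parent=parent):
            print(instance.name)


    if __name__ == "__main__":
        show_instances("my-project", "us-central1")  # placeholder values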
diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 b/packages/google-cloud-memorystore/MANIFEST.in similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 rename to packages/google-cloud-memorystore/MANIFEST.in index 29227d4cf419..d6814cd60037 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/.flake8 +++ b/packages/google-cloud-memorystore/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,19 +15,11 @@ # limitations under the License. # Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-memorystore/README.rst b/packages/google-cloud-memorystore/README.rst new file mode 100644 index 000000000000..c7ac077d4c69 --- /dev/null +++ b/packages/google-cloud-memorystore/README.rst @@ -0,0 +1,108 @@ +Python Client for +================== + +|preview| |pypi| |versions| + +``_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-memorystore.svg + :target: https://pypi.org/project/google-cloud-memorystore/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memorystore.svg + :target: https://pypi.org/project/google-cloud-memorystore/ +.. _: +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_overview +.. _Product Documentation: + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the .`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the .: +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. 
_samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-memorystore + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-memorystore + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for + to see other available methods on the client. +- Read the ` Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _ Product documentation: +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-memorystore/docs/CHANGELOG.md b/packages/google-cloud-memorystore/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-memorystore/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-memorystore/docs/README.rst b/packages/google-cloud-memorystore/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-memorystore/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-memorystore/docs/_static/custom.css b/packages/google-cloud-memorystore/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-memorystore/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-memorystore/docs/_templates/layout.html b/packages/google-cloud-memorystore/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-memorystore/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py b/packages/google-cloud-memorystore/docs/conf.py similarity index 88% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py rename to packages/google-cloud-memorystore/docs/conf.py index 8d134830ab88..1dd800bba097 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/docs/conf.py +++ b/packages/google-cloud-memorystore/docs/conf.py @@ -5,7 +5,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# # google-cloud-memorystore documentation build configuration file # # This file is execfile()d with the current directory set to its @@ -25,21 +24,25 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +52,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -78,9 +80,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-memorystore" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +project = "google-cloud-memorystore" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +98,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +108,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +154,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-memorystore", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -258,13 +266,13 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', + #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', + #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - # 'preamble': '', + #'preamble': '', # Latex figure (float) alignment - # 'figure_align': 'htbp', + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -274,7 +282,7 @@ ( root_doc, "google-cloud-memorystore.tex", - u"google-cloud-memorystore Documentation", + "google-cloud-memorystore Documentation", author, "manual", ) @@ -309,7 +317,7 @@ ( root_doc, "google-cloud-memorystore", - u"Google Cloud Memorystore Documentation", + "google-cloud-memorystore Documentation", [author], 1, ) @@ -328,10 +336,10 @@ ( root_doc, "google-cloud-memorystore", - u"google-cloud-memorystore Documentation", + "google-cloud-memorystore Documentation", author, "google-cloud-memorystore", - "GAPIC library for Google Cloud Memorystore API", + "google-cloud-memorystore Library", "APIs", ) ] @@ -351,14 +359,14 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/google-cloud-memorystore/docs/index.rst b/packages/google-cloud-memorystore/docs/index.rst new file mode 100644 index 000000000000..e2ce5cdc7aeb --- /dev/null +++ b/packages/google-cloud-memorystore/docs/index.rst @@ -0,0 +1,39 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of . 
+By default, you will get version ``memorystore_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1/services_ + memorystore_v1/types_ + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + memorystore_v1beta/services_ + memorystore_v1beta/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-memorystore`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/memorystore.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/memorystore.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/memorystore.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/services_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/services_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/docs/memorystore_v1/types_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1/types_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/memorystore.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/memorystore.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/memorystore.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/services_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/services_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/services_.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst b/packages/google-cloud-memorystore/docs/memorystore_v1beta/types_.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/docs/memorystore_v1beta/types_.rst rename to packages/google-cloud-memorystore/docs/memorystore_v1beta/types_.rst diff --git a/packages/google-cloud-memorystore/docs/multiprocessing.rst b/packages/google-cloud-memorystore/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-memorystore/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-memorystore/docs/summary_overview.md b/packages/google-cloud-memorystore/docs/summary_overview.md new file mode 100644 index 000000000000..607b7f1693fc --- /dev/null +++ b/packages/google-cloud-memorystore/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. 
Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# API + +Overview of the APIs available for API. + +## All entries + +Classes, methods and properties & attributes for + API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_property.html) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py new file mode 100644 index 000000000000..a18c9a80f0b2 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient +from google.cloud.memorystore_v1.types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "MemorystoreClient", + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore/py.typed diff --git 
a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py new file mode 100644 index 000000000000..64c6a11772a4 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.memorystore_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient +from .types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "CertificateAuthority", + "ConnectionType", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemorystoreClient", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "PscConnectionStatus", + "UpdateInstanceRequest", + "ZoneDistributionConfig", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_metadata.json rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/py.typed diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/__init__.py rename to 
packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/__init__.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py similarity index 94% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py index e2240b4bffb7..d1c440dabfa0 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/__init__.py @@ -15,6 +15,4 @@ # from .client import MemorystoreClient -__all__ = ( - 'MemorystoreClient', -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py similarity index 81% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py index 5dfc5b0d8992..eef12973a155 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.memorystore_v1 import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.memorystore_v1 import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,14 +50,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.memorystore_v1.services.memorystore import pagers -from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore 
+from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memorystore_v1.services.memorystore import pagers +from google.cloud.memorystore_v1.types import memorystore + +from .transports.base import DEFAULT_CLIENT_INFO, MemorystoreTransport from .transports.rest import MemorystoreRestTransport @@ -56,12 +70,14 @@ class MemorystoreClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] _transport_registry["rest"] = MemorystoreRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MemorystoreTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: """Returns an appropriate transport class. Args: @@ -153,8 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MemorystoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -171,117 +186,193 @@ def transport(self) -> MemorystoreTransport: return self._transport @staticmethod - def certificate_authority_path(project: str,location: str,instance: str,) -> str: + def certificate_authority_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified certificate_authority string.""" - return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_certificate_authority_path(path: str) -> Dict[str,str]: + def parse_certificate_authority_path(path: str) -> Dict[str, str]: """Parses a certificate_authority path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", + path, + ) return m.groupdict() if m else {} @staticmethod - def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str: + def forwarding_rule_path( + project: str, + region: str, + forwarding_rule: str, + ) -> str: """Returns a fully-qualified forwarding_rule string.""" - return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) @staticmethod - def parse_forwarding_rule_path(path: str) -> Dict[str,str]: + def parse_forwarding_rule_path(path: str) -> Dict[str, str]: """Parses a forwarding_rule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", + path, + ) return 
m.groupdict() if m else {} @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_attachment_path(project: str,region: str,service_attachment: str,) -> str: + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: """Returns a fully-qualified service_attachment string.""" - return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) @staticmethod - def parse_service_attachment_path(path: str) -> Dict[str,str]: + def parse_service_attachment_path(path: str) -> Dict[str, str]: """Parses a service_attachment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return 
"folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -313,16 +404,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -335,7 +432,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -356,13 +455,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -385,7 +490,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. 
Args: @@ -401,17 +508,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = MemorystoreClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -464,12 +579,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the memorystore client. 
Args: @@ -524,21 +643,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() - self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MemorystoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -547,8 +678,10 @@ def __init__(self, *, if transport_provided: # transport is a MemorystoreTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -557,20 +690,26 @@ def __init__(self, *, self._transport = cast(MemorystoreTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - MemorystoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + transport_init: Union[ + Type[MemorystoreTransport], Callable[..., MemorystoreTransport] + ] = ( MemorystoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MemorystoreTransport], transport) @@ -588,14 +727,15 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_instances(self, - request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: r"""Lists Instances in a given project and location. .. code-block:: python @@ -655,8 +795,10 @@ def sample_list_instances(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -674,9 +816,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -704,14 +844,15 @@ def sample_list_instances(): # Done; return the response. 
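# Note: the ListInstancesPager returned below resolves further pages lazily; iterating it re-issues the wrapped RPC with each next_page_token (see the pagers.py hunk later in this patch).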
return response - def get_instance(self, - request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.Instance: + def get_instance( + self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Gets details of a single Instance. .. code-block:: python @@ -766,8 +907,10 @@ def sample_get_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -785,9 +928,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -804,16 +945,17 @@ def sample_get_instance(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[memorystore.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Instance in a given project and location. @@ -907,8 +1049,10 @@ def sample_create_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -930,9 +1074,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
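A minimal usage sketch of the surface generated above (not part of the diff itself): the project, location, and instance ID below are placeholders, and create_instance/update_instance/delete_instance return long-running operations whose result() blocks until completion.

from google.cloud import memorystore_v1

client = memorystore_v1.MemorystoreClient()

# Placeholder parent path; substitute a real project and location.
parent = "projects/my-project/locations/us-central1"

# ListInstancesPager is iterable and fetches subsequent pages on demand.
for instance in client.list_instances(parent=parent):
    print(instance.name)

# get_instance takes the full resource name produced by instance_path().
name = memorystore_v1.MemorystoreClient.instance_path(
    "my-project", "us-central1", "my-instance"
)
instance = client.get_instance(name=name)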
@@ -957,15 +1099,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[memorystore.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Updates the parameters of a single Instance. .. code-block:: python @@ -1039,8 +1182,10 @@ def sample_update_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1060,9 +1205,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1087,14 +1232,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Instance. .. code-block:: python @@ -1165,8 +1311,10 @@ def sample_delete_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1184,9 +1332,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1211,14 +1357,17 @@ def sample_delete_instance(): # Done; return the response. return response - def get_certificate_authority(self, - request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.CertificateAuthority: + def get_certificate_authority( + self, + request: Optional[ + Union[memorystore.GetCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Gets details about the certificate authority for an Instance. @@ -1277,8 +1426,10 @@ def sample_get_certificate_authority(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1291,14 +1442,14 @@ def sample_get_certificate_authority(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + rpc = self._transport._wrapped_methods[ + self._transport.get_certificate_authority + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1364,8 +1515,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1373,7 +1523,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1414,8 +1568,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1423,7 +1576,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1468,15 +1625,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1517,15 +1678,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -1563,8 +1728,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1572,7 +1736,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1613,8 +1781,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1622,15 +1789,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "MemorystoreClient", -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py similarity index 78% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py index 025c07bfda1e..027706df288d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/pagers.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., memorystore.ListInstancesResponse], - request: memorystore.ListInstancesRequest, - response: memorystore.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -82,7 +99,12 @@ def pages(self) -> Iterator[memorystore.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[memorystore.Instance]: @@ -90,4 +112,4 @@ def __iter__(self) -> Iterator[memorystore.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/README.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/README.rst rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/README.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py similarity index 77% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py index 
6172c94a25d4..a1f2673d9bb5 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import MemorystoreTransport -from .rest import MemorystoreRestTransport -from .rest import MemorystoreRestInterceptor - +from .rest import MemorystoreRestInterceptor, MemorystoreRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] -_transport_registry['rest'] = MemorystoreRestTransport +_transport_registry["rest"] = MemorystoreRestTransport __all__ = ( - 'MemorystoreTransport', - 'MemorystoreRestTransport', - 'MemorystoreRestInterceptor', + "MemorystoreTransport", + "MemorystoreRestTransport", + "MemorystoreRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py similarity index 70% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py index 216396a3b63e..e81e20865449 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py @@ -16,44 +16,44 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.memorystore_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1 import gapic_version as package_version from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MemorystoreTransport(abc.ABC): """Abstract transport class for Memorystore.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "memorystore.googleapis.com" - DEFAULT_HOST: str = 'memorystore.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -89,30 +89,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -209,14 +217,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -226,57 +234,63 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - Union[ - memorystore.ListInstancesResponse, - Awaitable[memorystore.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - Union[ - memorystore.Instance, - Awaitable[memorystore.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [memorystore.GetInstanceRequest], + Union[memorystore.Instance, Awaitable[memorystore.Instance]], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - Union[ - memorystore.CertificateAuthority, - Awaitable[memorystore.CertificateAuthority] - ]]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority], + ], + ]: raise NotImplementedError() @property @@ -284,7 +298,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -300,23 +317,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -324,10 +336,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - 
Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -336,6 +352,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MemorystoreTransport', -) +__all__ = ("MemorystoreTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py similarity index 67% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py index d990c453e173..f28f5839be6a 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py @@ -14,31 +14,25 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseMemorystoreRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMemorystoreRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -121,7 +115,12 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + + def pre_create_instance( + self, + request: memorystore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -129,7 +128,9 @@ def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def 
post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance Override in a subclass to manipulate the response @@ -138,7 +139,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_delete_instance( + self, + request: memorystore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -146,7 +151,9 @@ def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance Override in a subclass to manipulate the response @@ -155,7 +162,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + def pre_get_certificate_authority( + self, + request: memorystore.GetCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the request or metadata @@ -163,7 +174,9 @@ def pre_get_certificate_authority(self, request: memorystore.GetCertificateAutho """ return request, metadata - def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + def post_get_certificate_authority( + self, response: memorystore.CertificateAuthority + ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the response @@ -172,7 +185,11 @@ def post_get_certificate_authority(self, response: memorystore.CertificateAuthor """ return response - def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance( + self, + request: memorystore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -189,7 +206,11 @@ def post_get_instance(self, response: memorystore.Instance) -> memorystore.Insta """ return response - def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + def pre_list_instances( + self, + request: memorystore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the 
request or metadata @@ -197,7 +218,9 @@ def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + def post_list_instances( + self, response: memorystore.ListInstancesResponse + ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances Override in a subclass to manipulate the response @@ -206,7 +229,11 @@ def post_list_instances(self, response: memorystore.ListInstancesResponse) -> me """ return response - def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_update_instance( + self, + request: memorystore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -214,7 +241,9 @@ def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance Override in a subclass to manipulate the response @@ -224,7 +253,9 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -245,7 +276,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -266,7 +299,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -275,9 +310,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -287,7 +320,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -296,9 +331,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> 
None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -308,7 +341,9 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -329,7 +364,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -369,20 +406,21 @@ class MemorystoreRestTransport(_BaseMemorystoreRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MemorystoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -425,10 +463,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -445,46 +484,51 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + class _CreateInstance( + _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CreateInstance") @@ -496,27 +540,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -536,17 +582,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -559,7 +621,9 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) return resp - class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + class _DeleteInstance( + _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteInstance") @@ -571,26 +635,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -610,15 +676,28 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -631,7 +710,9 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) return resp - class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + class _GetCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetCertificateAuthority") @@ -643,26 +724,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetCertificateAuthorityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.CertificateAuthority: + def __call__( + self, + request: memorystore.GetCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Call the get certificate authority method over HTTP. 
Args: @@ -681,15 +764,30 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() - request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + ) + request, metadata = self._interceptor.pre_get_certificate_authority( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -704,7 +802,9 @@ def __call__(self, resp = self._interceptor.post_get_certificate_authority(resp) return resp - class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + class _GetInstance( + _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetInstance") @@ -716,26 +816,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.Instance: + def __call__( + self, + request: memorystore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Call the get instance method over HTTP. Args: @@ -752,15 +854,32 @@ def __call__(self, A Memorystore instance. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -775,7 +894,9 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp - class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + class _ListInstances( + _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListInstances") @@ -787,26 +908,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.ListInstancesResponse: + def __call__( + self, + request: memorystore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -823,15 +946,30 @@ def __call__(self, Response message for [ListInstances][]. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -846,7 +984,9 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp - class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + class _UpdateInstance( + _BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.UpdateInstance") @@ -858,27 +998,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -898,17 +1040,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -922,58 +1080,64 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[memorystore.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[memorystore.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - memorystore.CertificateAuthority]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], memorystore.CertificateAuthority + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - memorystore.Instance]: + def get_instance( + self, + ) -> Callable[[memorystore.GetInstanceRequest], memorystore.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - memorystore.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], memorystore.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[memorystore.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + class _GetLocation( + _BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetLocation") @@ -985,27 +1149,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. 
Args: @@ -1021,15 +1186,32 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1044,9 +1226,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + class _ListLocations( + _BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListLocations") @@ -1058,27 +1242,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1094,15 +1279,30 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1117,9 +1317,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + class _CancelOperation( + _BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CancelOperation") @@ -1131,27 +1333,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1164,15 +1367,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1183,9 +1401,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + class _DeleteOperation( + _BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteOperation") @@ -1197,27 +1417,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1230,15 +1451,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1249,9 +1485,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + class _GetOperation( + _BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetOperation") @@ -1263,27 +1501,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1299,15 +1538,32 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1322,9 +1578,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + class _ListOperations( + _BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListOperations") @@ -1336,27 +1594,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1372,15 +1631,28 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1401,6 +1673,4 @@ def close(self): self._session.close() -__all__=( - 'MemorystoreRestTransport', -) +__all__ = ("MemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py similarity index 59% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py index 3155d66c201d..b7db715096ee 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py @@ -14,19 +14,17 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, MemorystoreTransport class _BaseMemorystoreRestTransport(MemorystoreTransport): @@ -42,14 +40,16 @@ class _BaseMemorystoreRestTransport(MemorystoreTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -73,7 +73,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,27 +86,33 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,17 +127,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - 
use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -138,19 +152,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -162,11 +180,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -175,19 +199,23 @@ class _BaseGetCertificateAuthority: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority", + }, ] return http_options @@ -199,11 +227,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -212,19 +246,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -236,11 +274,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -249,19 +293,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -273,11 +321,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -286,20 +340,24 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -314,17 +372,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -335,23 +399,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -360,23 +424,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -385,23 +449,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -410,23 +474,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -435,23 +499,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -460,26 +524,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseMemorystoreRestTransport', -) +__all__ = ("_BaseMemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py index fc13543f2db2..a639afb23c9d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py @@ -15,6 +15,7 @@ # from .memorystore import ( CertificateAuthority, + ConnectionType, CreateInstanceRequest, DeleteInstanceRequest, DiscoveryEndpoint, @@ -28,29 +29,28 @@ PersistenceConfig, PscAutoConnection, PscConnection, + PscConnectionStatus, UpdateInstanceRequest, ZoneDistributionConfig, - ConnectionType, - PscConnectionStatus, ) __all__ = ( - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py index ed348f00d027..840f7254c853 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py @@ -17,33 +17,31 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.memorystore.v1', + package="google.cloud.memorystore.v1", manifest={ - 'PscConnectionStatus', - 'ConnectionType', - 'Instance', - 'PscAutoConnection', - 'PscConnection', - 'DiscoveryEndpoint', - 'PersistenceConfig', - 'NodeConfig', - 'ZoneDistributionConfig', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'GetCertificateAuthorityRequest', - 'CertificateAuthority', - 'OperationMetadata', + "PscConnectionStatus", + "ConnectionType", + "Instance", + "PscAutoConnection", + 
"PscConnection", + "DiscoveryEndpoint", + "PersistenceConfig", + "NodeConfig", + "ZoneDistributionConfig", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "GetCertificateAuthorityRequest", + "CertificateAuthority", + "OperationMetadata", }, ) @@ -162,6 +160,7 @@ class Instance(proto.Message): mode (google.cloud.memorystore_v1.types.Instance.Mode): Optional. The mode config for the instance. """ + class State(proto.Enum): r"""Possible states of the instance. @@ -295,11 +294,11 @@ class UpdateInfo(proto.Message): optional=True, ) - update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + update_info: "Instance.StateInfo.UpdateInfo" = proto.Field( proto.MESSAGE, number=1, - oneof='info', - message='Instance.StateInfo.UpdateInfo', + oneof="info", + message="Instance.StateInfo.UpdateInfo", ) class InstanceEndpoint(proto.Message): @@ -315,10 +314,10 @@ class InstanceEndpoint(proto.Message): each service attachment in the cluster. """ - connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + connections: MutableSequence["Instance.ConnectionDetail"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance.ConnectionDetail', + message="Instance.ConnectionDetail", ) class ConnectionDetail(proto.Message): @@ -345,17 +344,17 @@ class ConnectionDetail(proto.Message): This field is a member of `oneof`_ ``connection``. """ - psc_auto_connection: 'PscAutoConnection' = proto.Field( + psc_auto_connection: "PscAutoConnection" = proto.Field( proto.MESSAGE, number=1, - oneof='connection', - message='PscAutoConnection', + oneof="connection", + message="PscAutoConnection", ) - psc_connection: 'PscConnection' = proto.Field( + psc_connection: "PscConnection" = proto.Field( proto.MESSAGE, number=2, - oneof='connection', - message='PscConnection', + oneof="connection", + message="PscConnection", ) name: str = proto.Field( @@ -410,20 +409,20 @@ class ConnectionDetail(proto.Message): proto.INT32, number=11, ) - discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + discovery_endpoints: MutableSequence["DiscoveryEndpoint"] = proto.RepeatedField( proto.MESSAGE, number=12, - message='DiscoveryEndpoint', + message="DiscoveryEndpoint", ) node_type: NodeType = proto.Field( proto.ENUM, number=13, enum=NodeType, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=14, - message='PersistenceConfig', + message="PersistenceConfig", ) engine_version: str = proto.Field( proto.STRING, @@ -434,25 +433,25 @@ class ConnectionDetail(proto.Message): proto.STRING, number=16, ) - node_config: 'NodeConfig' = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=17, - message='NodeConfig', + message="NodeConfig", ) - zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( proto.MESSAGE, number=18, - message='ZoneDistributionConfig', + message="ZoneDistributionConfig", ) deletion_protection_enabled: bool = proto.Field( proto.BOOL, number=19, optional=True, ) - psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='PscAutoConnection', + message="PscAutoConnection", ) endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( 
proto.MESSAGE, @@ -514,7 +513,7 @@ class PscAutoConnection(proto.Message): port: int = proto.Field( proto.INT32, number=9, - oneof='ports', + oneof="ports", ) psc_connection_id: str = proto.Field( proto.STRING, @@ -540,15 +539,15 @@ class PscAutoConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -613,15 +612,15 @@ class PscConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -669,6 +668,7 @@ class PersistenceConfig(proto.Message): Optional. AOF configuration. This field will be ignored if mode is not AOF. """ + class PersistenceMode(proto.Enum): r"""Possible persistence modes. @@ -700,6 +700,7 @@ class RDBConfig(proto.Message): snapshots will be aligned. If not provided, the current time will be used. """ + class SnapshotPeriod(proto.Enum): r"""Possible snapshot periods. @@ -721,10 +722,10 @@ class SnapshotPeriod(proto.Enum): TWELVE_HOURS = 3 TWENTY_FOUR_HOURS = 4 - rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + rdb_snapshot_period: "PersistenceConfig.RDBConfig.SnapshotPeriod" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + enum="PersistenceConfig.RDBConfig.SnapshotPeriod", ) rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -739,6 +740,7 @@ class AOFConfig(proto.Message): append_fsync (google.cloud.memorystore_v1.types.PersistenceConfig.AOFConfig.AppendFsync): Optional. The fsync mode. """ + class AppendFsync(proto.Enum): r"""Possible fsync modes. @@ -762,10 +764,10 @@ class AppendFsync(proto.Enum): EVERY_SEC = 2 ALWAYS = 3 - append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + append_fsync: "PersistenceConfig.AOFConfig.AppendFsync" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.AOFConfig.AppendFsync', + enum="PersistenceConfig.AOFConfig.AppendFsync", ) mode: PersistenceMode = proto.Field( @@ -811,6 +813,7 @@ class ZoneDistributionConfig(proto.Message): Optional. Current zone distribution mode. Defaults to MULTI_ZONE. """ + class ZoneDistributionMode(proto.Enum): r"""Possible zone distribution modes. 
@@ -904,10 +907,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -986,10 +989,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1034,10 +1037,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1142,16 +1145,18 @@ class CertChain(proto.Message): number=1, ) - ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", ) managed_server_ca: ManagedCertificateAuthority = proto.Field( proto.MESSAGE, number=2, - oneof='server_ca', + oneof="server_ca", message=ManagedCertificateAuthority, ) name: str = proto.Field( diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..f787aca084c1 --- /dev/null +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.memorystore_v1beta import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.memorystore import MemorystoreClient +from .types.memorystore import ( + CertificateAuthority, + ConnectionType, + CreateInstanceRequest, + DeleteInstanceRequest, + DiscoveryEndpoint, + GetCertificateAuthorityRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + NodeConfig, + OperationMetadata, + PersistenceConfig, + PscAutoConnection, + PscConnection, + PscConnectionStatus, + UpdateInstanceRequest, + ZoneDistributionConfig, +) + +__all__ = ( + "CertificateAuthority", + "ConnectionType", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MemorystoreClient", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "PscConnectionStatus", + "UpdateInstanceRequest", + "ZoneDistributionConfig", +) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/gapic_metadata.json rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_metadata.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/gapic_version.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/py.typed similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore/py.typed rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/py.typed diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/__init__.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/__init__.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py similarity index 94% rename from owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py index e2240b4bffb7..d1c440dabfa0 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/google/cloud/memorystore_v1/services/memorystore/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/__init__.py @@ -15,6 +15,4 @@ # from .client import MemorystoreClient -__all__ = ( - 'MemorystoreClient', 
-) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py similarity index 81% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py index f543e438492b..546c4c14b9af 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py @@ -16,20 +16,32 @@ from collections import OrderedDict import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings -from google.cloud.memorystore_v1beta import gapic_version as package_version - from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.memorystore_v1beta import gapic_version as package_version try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -38,14 +50,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.memorystore_v1beta.services.memorystore import pagers -from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MemorystoreTransport, DEFAULT_CLIENT_INFO + +from google.cloud.memorystore_v1beta.services.memorystore import pagers +from google.cloud.memorystore_v1beta.types import memorystore + +from .transports.base import DEFAULT_CLIENT_INFO, MemorystoreTransport from .transports.rest import MemorystoreRestTransport @@ -56,12 +70,14 @@ class MemorystoreClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] _transport_registry["rest"] = MemorystoreRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MemorystoreTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MemorystoreTransport]: """Returns an appropriate transport class. Args: @@ -153,8 +169,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MemorystoreClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -171,117 +186,193 @@ def transport(self) -> MemorystoreTransport: return self._transport @staticmethod - def certificate_authority_path(project: str,location: str,instance: str,) -> str: + def certificate_authority_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified certificate_authority string.""" - return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_certificate_authority_path(path: str) -> Dict[str,str]: + def parse_certificate_authority_path(path: str) -> Dict[str, str]: """Parses a certificate_authority path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/certificateAuthority$", + path, + ) return m.groupdict() if m else {} @staticmethod - def forwarding_rule_path(project: str,region: str,forwarding_rule: str,) -> str: + def forwarding_rule_path( + project: str, + region: str, + forwarding_rule: str, + ) -> str: """Returns a fully-qualified forwarding_rule string.""" - return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + return "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) @staticmethod - def parse_forwarding_rule_path(path: str) -> Dict[str,str]: + def parse_forwarding_rule_path(path: str) -> Dict[str, str]: """Parses a forwarding_rule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/forwardingRules/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component 
segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def network_path(project: str,network: str,) -> str: + def network_path( + project: str, + network: str, + ) -> str: """Returns a fully-qualified network string.""" - return "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) @staticmethod - def parse_network_path(path: str) -> Dict[str,str]: + def parse_network_path(path: str) -> Dict[str, str]: """Parses a network path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/global/networks/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_attachment_path(project: str,region: str,service_attachment: str,) -> str: + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: """Returns a fully-qualified service_attachment string.""" - return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) @staticmethod - def parse_service_attachment_path(path: str) -> Dict[str,str]: + def parse_service_attachment_path(path: str) -> Dict[str, str]: """Parses a service_attachment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + 
organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -313,16 +404,22 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -335,7 +432,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -356,13 +455,19 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert == "true", use_mtls_endpoint, universe_domain_env @staticmethod @@ -385,7 +490,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): """Return the API endpoint used by the client. Args: @@ -401,17 +508,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = MemorystoreClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = MemorystoreClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. 
Args: @@ -464,12 +579,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MemorystoreTransport, Callable[..., MemorystoreTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the memorystore client. Args: @@ -524,21 +643,33 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MemorystoreClient._read_environment_variables() - self._client_cert_source = MemorystoreClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MemorystoreClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MemorystoreClient._read_environment_variables() + self._client_cert_source = MemorystoreClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MemorystoreClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -547,8 +678,10 @@ def __init__(self, *, if transport_provided: # transport is a MemorystoreTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " @@ -557,20 +690,26 @@ def __init__(self, *, self._transport = cast(MemorystoreTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - MemorystoreClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or MemorystoreClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MemorystoreTransport], Callable[..., MemorystoreTransport]] = ( + transport_init: Union[ + Type[MemorystoreTransport], Callable[..., MemorystoreTransport] + ] = ( MemorystoreClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MemorystoreTransport], transport) @@ -588,14 +727,15 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) - def list_instances(self, - request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[memorystore.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: r"""Lists Instances in a given project and location. .. code-block:: python @@ -655,8 +795,10 @@ def sample_list_instances(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -674,9 +816,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -704,14 +844,15 @@ def sample_list_instances(): # Done; return the response. 
return response - def get_instance(self, - request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.Instance: + def get_instance( + self, + request: Optional[Union[memorystore.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Gets details of a single Instance. .. code-block:: python @@ -766,8 +907,10 @@ def sample_get_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -785,9 +928,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -804,16 +945,17 @@ def sample_get_instance(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance: Optional[memorystore.Instance] = None, - instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[memorystore.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance: Optional[memorystore.Instance] = None, + instance_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Creates a new Instance in a given project and location. @@ -907,8 +1049,10 @@ def sample_create_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance, instance_id]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -930,9 +1074,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -957,15 +1099,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[memorystore.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[memorystore.UpdateInstanceRequest, dict]] = None, + *, + instance: Optional[memorystore.Instance] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Updates the parameters of a single Instance. .. code-block:: python @@ -1039,8 +1182,10 @@ def sample_update_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([instance, update_mask]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1060,9 +1205,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1087,14 +1232,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[memorystore.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: r"""Deletes a single Instance. .. code-block:: python @@ -1165,8 +1311,10 @@ def sample_delete_instance(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1184,9 +1332,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1211,14 +1357,17 @@ def sample_delete_instance(): # Done; return the response. return response - def get_certificate_authority(self, - request: Optional[Union[memorystore.GetCertificateAuthorityRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> memorystore.CertificateAuthority: + def get_certificate_authority( + self, + request: Optional[ + Union[memorystore.GetCertificateAuthorityRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Gets details about the certificate authority for an Instance. @@ -1277,8 +1426,10 @@ def sample_get_certificate_authority(): # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1291,14 +1442,14 @@ def sample_get_certificate_authority(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_certificate_authority] + rpc = self._transport._wrapped_methods[ + self._transport.get_certificate_authority + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1364,8 +1515,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1373,7 +1523,11 @@ def list_operations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1414,8 +1568,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1423,7 +1576,11 @@ def get_operation( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1468,15 +1625,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1517,15 +1678,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def get_location( self, @@ -1563,8 +1728,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1572,7 +1736,11 @@ def get_location( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1613,8 +1781,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1622,15 +1789,19 @@ def list_locations( # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) -__all__ = ( - "MemorystoreClient", -) +__all__ = ("MemorystoreClient",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py similarity index 78% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py index efff474686e6..f793f8b5f67c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/pagers.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., memorystore.ListInstancesResponse], - request: memorystore.ListInstancesRequest, - response: memorystore.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + + def __init__( + self, + method: Callable[..., memorystore.ListInstancesResponse], + request: memorystore.ListInstancesRequest, + response: memorystore.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = () + ): """Instantiate the pager. Args: @@ -82,7 +99,12 @@ def pages(self) -> Iterator[memorystore.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[memorystore.Instance]: @@ -90,4 +112,4 @@ def __iter__(self) -> Iterator[memorystore.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/README.rst diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py similarity index 77% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py rename to 
packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py index 6172c94a25d4..a1f2673d9bb5 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/__init__.py @@ -17,16 +17,14 @@ from typing import Dict, Type from .base import MemorystoreTransport -from .rest import MemorystoreRestTransport -from .rest import MemorystoreRestInterceptor - +from .rest import MemorystoreRestInterceptor, MemorystoreRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[MemorystoreTransport]] -_transport_registry['rest'] = MemorystoreRestTransport +_transport_registry["rest"] = MemorystoreRestTransport __all__ = ( - 'MemorystoreTransport', - 'MemorystoreRestTransport', - 'MemorystoreRestInterceptor', + "MemorystoreTransport", + "MemorystoreRestTransport", + "MemorystoreRestInterceptor", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py similarity index 70% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py index 5b8147a83f2f..adfd6080032c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/base.py @@ -16,44 +16,44 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.memorystore_v1beta import gapic_version as package_version - -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.memorystore_v1beta import gapic_version as package_version from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class MemorystoreTransport(abc.ABC): """Abstract transport class for Memorystore.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "memorystore.googleapis.com" - DEFAULT_HOST: str = 'memorystore.googleapis.com' def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: 
Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -89,30 +89,38 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -209,14 +217,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -226,57 +234,63 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - Union[ - memorystore.ListInstancesResponse, - Awaitable[memorystore.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], + Union[ + memorystore.ListInstancesResponse, + Awaitable[memorystore.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - Union[ - memorystore.Instance, - Awaitable[memorystore.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [memorystore.GetInstanceRequest], + Union[memorystore.Instance, Awaitable[memorystore.Instance]], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [memorystore.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [memorystore.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [memorystore.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - Union[ - memorystore.CertificateAuthority, - Awaitable[memorystore.CertificateAuthority] - ]]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], + Union[ + memorystore.CertificateAuthority, + Awaitable[memorystore.CertificateAuthority], + ], + ]: raise NotImplementedError() @property @@ -284,7 +298,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -300,23 +317,18 @@ def get_operation( @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -324,10 +336,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - 
Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -336,6 +352,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MemorystoreTransport', -) +__all__ = ("MemorystoreTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py similarity index 67% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py index 74bfb95d397d..6cbe62d803ff 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py @@ -14,31 +14,25 @@ # limitations under the License. # -from google.auth.transport.requests import AuthorizedSession # type: ignore +import dataclasses import json # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseMemorystoreRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMemorystoreRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -121,7 +115,12 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + + def pre_create_instance( + self, + request: memorystore.CreateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.CreateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -129,7 +128,9 @@ def pre_create_instance(self, request: memorystore.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> 
operations_pb2.Operation: + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance Override in a subclass to manipulate the response @@ -138,7 +139,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_delete_instance( + self, + request: memorystore.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -146,7 +151,9 @@ def pre_delete_instance(self, request: memorystore.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance Override in a subclass to manipulate the response @@ -155,7 +162,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_get_certificate_authority(self, request: memorystore.GetCertificateAuthorityRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: + def pre_get_certificate_authority( + self, + request: memorystore.GetCertificateAuthorityRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetCertificateAuthorityRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the request or metadata @@ -163,7 +174,9 @@ def pre_get_certificate_authority(self, request: memorystore.GetCertificateAutho """ return request, metadata - def post_get_certificate_authority(self, response: memorystore.CertificateAuthority) -> memorystore.CertificateAuthority: + def post_get_certificate_authority( + self, response: memorystore.CertificateAuthority + ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority Override in a subclass to manipulate the response @@ -172,7 +185,11 @@ def post_get_certificate_authority(self, response: memorystore.CertificateAuthor """ return response - def pre_get_instance(self, request: memorystore.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance( + self, + request: memorystore.GetInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.GetInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -189,7 +206,11 @@ def post_get_instance(self, response: memorystore.Instance) -> memorystore.Insta """ return response - def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: + def pre_list_instances( + self, + request: memorystore.ListInstancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.ListInstancesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_instances Override in 
a subclass to manipulate the request or metadata @@ -197,7 +218,9 @@ def pre_list_instances(self, request: memorystore.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: memorystore.ListInstancesResponse) -> memorystore.ListInstancesResponse: + def post_list_instances( + self, response: memorystore.ListInstancesResponse + ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances Override in a subclass to manipulate the response @@ -206,7 +229,11 @@ def post_list_instances(self, response: memorystore.ListInstancesResponse) -> me """ return response - def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_update_instance( + self, + request: memorystore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[memorystore.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -214,7 +241,9 @@ def pre_update_instance(self, request: memorystore.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance Override in a subclass to manipulate the response @@ -224,7 +253,9 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_location @@ -245,7 +276,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_locations @@ -266,7 +299,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for cancel_operation @@ -275,9 +310,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -287,7 +320,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_operation @@ -296,9 +331,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - 
self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -308,7 +341,9 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_operation @@ -329,7 +364,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_operations @@ -369,20 +406,21 @@ class MemorystoreRestTransport(_BaseMemorystoreRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MemorystoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MemorystoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -425,10 +463,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -445,46 +484,51 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub): + class _CreateInstance( + _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CreateInstance") @@ -496,27 +540,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -536,17 +582,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -559,7 +621,9 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) return resp - class _DeleteInstance(_BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub): + class _DeleteInstance( + _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteInstance") @@ -571,26 +635,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -610,15 +676,28 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -631,7 +710,9 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) return resp - class _GetCertificateAuthority(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub): + class _GetCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetCertificateAuthority") @@ -643,26 +724,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetCertificateAuthorityRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.CertificateAuthority: + def __call__( + self, + request: memorystore.GetCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.CertificateAuthority: r"""Call the get certificate authority method over HTTP. 
Args: @@ -681,15 +764,30 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() - request, metadata = self._interceptor.pre_get_certificate_authority(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + ) + request, metadata = self._interceptor.pre_get_certificate_authority( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._GetCertificateAuthority._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -704,7 +802,9 @@ def __call__(self, resp = self._interceptor.post_get_certificate_authority(resp) return resp - class _GetInstance(_BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub): + class _GetInstance( + _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetInstance") @@ -716,26 +816,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.Instance: + def __call__( + self, + request: memorystore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.Instance: r"""Call the get instance method over HTTP. Args: @@ -752,15 +854,32 @@ def __call__(self, A Memorystore instance. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -775,7 +894,9 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp - class _ListInstances(_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub): + class _ListInstances( + _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListInstances") @@ -787,26 +908,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: memorystore.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> memorystore.ListInstancesResponse: + def __call__( + self, + request: memorystore.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> memorystore.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -823,15 +946,30 @@ def __call__(self, Response message for [ListInstances][]. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -846,7 +984,9 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp - class _UpdateInstance(_BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub): + class _UpdateInstance( + _BaseMemorystoreRestTransport._BaseUpdateInstance, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.UpdateInstance") @@ -858,27 +998,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: memorystore.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: memorystore.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -898,17 +1040,33 @@ def __call__(self, """ - http_options = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = MemorystoreRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -922,58 +1080,64 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [memorystore.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[memorystore.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [memorystore.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[memorystore.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_certificate_authority(self) -> Callable[ - [memorystore.GetCertificateAuthorityRequest], - memorystore.CertificateAuthority]: + def get_certificate_authority( + self, + ) -> Callable[ + [memorystore.GetCertificateAuthorityRequest], memorystore.CertificateAuthority + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore + return self._GetCertificateAuthority(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [memorystore.GetInstanceRequest], - memorystore.Instance]: + def get_instance( + self, + ) -> Callable[[memorystore.GetInstanceRequest], memorystore.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [memorystore.ListInstancesRequest], - memorystore.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [memorystore.ListInstancesRequest], memorystore.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [memorystore.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[memorystore.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub): + class _GetLocation( + _BaseMemorystoreRestTransport._BaseGetLocation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetLocation") @@ -985,27 +1149,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. 
Args: @@ -1021,15 +1186,32 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1044,9 +1226,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub): + class _ListLocations( + _BaseMemorystoreRestTransport._BaseListLocations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListLocations") @@ -1058,27 +1242,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1094,15 +1279,30 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1117,9 +1317,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub): + class _CancelOperation( + _BaseMemorystoreRestTransport._BaseCancelOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.CancelOperation") @@ -1131,27 +1333,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1164,15 +1367,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseCancelOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1183,9 +1401,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub): + class _DeleteOperation( + _BaseMemorystoreRestTransport._BaseDeleteOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.DeleteOperation") @@ -1197,27 +1417,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1230,15 +1451,30 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteOperation._get_http_options() + ) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1249,9 +1485,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub): + class _GetOperation( + _BaseMemorystoreRestTransport._BaseGetOperation, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.GetOperation") @@ -1263,27 +1501,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1299,15 +1538,32 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseMemorystoreRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) # Send the request - response = MemorystoreRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1322,9 +1578,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub): + class _ListOperations( + _BaseMemorystoreRestTransport._BaseListOperations, MemorystoreRestStub + ): def __hash__(self): return hash("MemorystoreRestTransport.ListOperations") @@ -1336,27 +1594,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1372,15 +1631,28 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseMemorystoreRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseMemorystoreRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = _BaseMemorystoreRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) # Send the request - response = MemorystoreRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = MemorystoreRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1401,6 +1673,4 @@ def close(self): self._session.close() -__all__=( - 'MemorystoreRestTransport', -) +__all__ = ("MemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py similarity index 59% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py index 7374ad3d00d2..6c2af86b90d1 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest_base.py @@ -14,19 +14,17 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import MemorystoreTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore + +from .base import DEFAULT_CLIENT_INFO, MemorystoreTransport class _BaseMemorystoreRestTransport(MemorystoreTransport): @@ -42,14 +40,16 @@ class _BaseMemorystoreRestTransport(MemorystoreTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'memorystore.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "memorystore.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -73,7 +73,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,27 +86,33 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,17 +127,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -138,19 +152,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -162,11 +180,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -175,19 +199,23 @@ class _BaseGetCertificateAuthority: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority", + }, ] return http_options @@ -199,11 +227,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -212,19 +246,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -236,11 +274,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -249,19 +293,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -273,11 +321,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -286,20 +340,24 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} 
@classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1beta/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -314,17 +372,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True + transcoded_request["body"], use_integers_for_enums=True ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) query_params["$alt"] = "json;enum-encoding=int" return query_params @@ -335,23 +399,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -360,23 +424,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -385,23 +449,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'post', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -410,23 +474,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -435,23 +499,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -460,26 +524,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseMemorystoreRestTransport', -) +__all__ = ("_BaseMemorystoreRestTransport",) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py similarity index 69% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py index fc13543f2db2..a639afb23c9d 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/__init__.py @@ -15,6 +15,7 @@ # from .memorystore import ( CertificateAuthority, + ConnectionType, CreateInstanceRequest, DeleteInstanceRequest, DiscoveryEndpoint, @@ -28,29 +29,28 @@ PersistenceConfig, PscAutoConnection, PscConnection, + PscConnectionStatus, UpdateInstanceRequest, ZoneDistributionConfig, - ConnectionType, - PscConnectionStatus, ) __all__ = ( - 'CertificateAuthority', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'DiscoveryEndpoint', - 'GetCertificateAuthorityRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'NodeConfig', - 'OperationMetadata', - 'PersistenceConfig', - 'PscAutoConnection', - 'PscConnection', - 'UpdateInstanceRequest', - 'ZoneDistributionConfig', - 'ConnectionType', - 'PscConnectionStatus', + "CertificateAuthority", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "DiscoveryEndpoint", + "GetCertificateAuthorityRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "NodeConfig", + "OperationMetadata", + "PersistenceConfig", + "PscAutoConnection", + "PscConnection", + "UpdateInstanceRequest", + "ZoneDistributionConfig", + "ConnectionType", + "PscConnectionStatus", ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py rename to packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py index dd56d5678ceb..4cfef649dfae 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/google/cloud/memorystore_v1beta/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py @@ -17,33 +17,31 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.memorystore.v1beta', + package="google.cloud.memorystore.v1beta", manifest={ - 'PscConnectionStatus', - 'ConnectionType', - 'Instance', - 'PscAutoConnection', - 'PscConnection', - 'DiscoveryEndpoint', - 'PersistenceConfig', - 'NodeConfig', - 'ZoneDistributionConfig', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'GetCertificateAuthorityRequest', - 'CertificateAuthority', - 'OperationMetadata', + 
"PscConnectionStatus", + "ConnectionType", + "Instance", + "PscAutoConnection", + "PscConnection", + "DiscoveryEndpoint", + "PersistenceConfig", + "NodeConfig", + "ZoneDistributionConfig", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "GetCertificateAuthorityRequest", + "CertificateAuthority", + "OperationMetadata", }, ) @@ -162,6 +160,7 @@ class Instance(proto.Message): mode (google.cloud.memorystore_v1beta.types.Instance.Mode): Optional. The mode config for the instance. """ + class State(proto.Enum): r"""Possible states of the instance. @@ -295,11 +294,11 @@ class UpdateInfo(proto.Message): optional=True, ) - update_info: 'Instance.StateInfo.UpdateInfo' = proto.Field( + update_info: "Instance.StateInfo.UpdateInfo" = proto.Field( proto.MESSAGE, number=1, - oneof='info', - message='Instance.StateInfo.UpdateInfo', + oneof="info", + message="Instance.StateInfo.UpdateInfo", ) class InstanceEndpoint(proto.Message): @@ -315,10 +314,10 @@ class InstanceEndpoint(proto.Message): each service attachment in the cluster. """ - connections: MutableSequence['Instance.ConnectionDetail'] = proto.RepeatedField( + connections: MutableSequence["Instance.ConnectionDetail"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance.ConnectionDetail', + message="Instance.ConnectionDetail", ) class ConnectionDetail(proto.Message): @@ -345,17 +344,17 @@ class ConnectionDetail(proto.Message): This field is a member of `oneof`_ ``connection``. """ - psc_auto_connection: 'PscAutoConnection' = proto.Field( + psc_auto_connection: "PscAutoConnection" = proto.Field( proto.MESSAGE, number=1, - oneof='connection', - message='PscAutoConnection', + oneof="connection", + message="PscAutoConnection", ) - psc_connection: 'PscConnection' = proto.Field( + psc_connection: "PscConnection" = proto.Field( proto.MESSAGE, number=2, - oneof='connection', - message='PscConnection', + oneof="connection", + message="PscConnection", ) name: str = proto.Field( @@ -410,20 +409,20 @@ class ConnectionDetail(proto.Message): proto.INT32, number=11, ) - discovery_endpoints: MutableSequence['DiscoveryEndpoint'] = proto.RepeatedField( + discovery_endpoints: MutableSequence["DiscoveryEndpoint"] = proto.RepeatedField( proto.MESSAGE, number=12, - message='DiscoveryEndpoint', + message="DiscoveryEndpoint", ) node_type: NodeType = proto.Field( proto.ENUM, number=13, enum=NodeType, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=14, - message='PersistenceConfig', + message="PersistenceConfig", ) engine_version: str = proto.Field( proto.STRING, @@ -434,25 +433,25 @@ class ConnectionDetail(proto.Message): proto.STRING, number=16, ) - node_config: 'NodeConfig' = proto.Field( + node_config: "NodeConfig" = proto.Field( proto.MESSAGE, number=17, - message='NodeConfig', + message="NodeConfig", ) - zone_distribution_config: 'ZoneDistributionConfig' = proto.Field( + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( proto.MESSAGE, number=18, - message='ZoneDistributionConfig', + message="ZoneDistributionConfig", ) deletion_protection_enabled: bool = proto.Field( proto.BOOL, number=19, optional=True, ) - psc_auto_connections: MutableSequence['PscAutoConnection'] = proto.RepeatedField( + psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='PscAutoConnection', + 
message="PscAutoConnection", ) endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( proto.MESSAGE, @@ -514,7 +513,7 @@ class PscAutoConnection(proto.Message): port: int = proto.Field( proto.INT32, number=9, - oneof='ports', + oneof="ports", ) psc_connection_id: str = proto.Field( proto.STRING, @@ -540,15 +539,15 @@ class PscAutoConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -613,15 +612,15 @@ class PscConnection(proto.Message): proto.STRING, number=6, ) - psc_connection_status: 'PscConnectionStatus' = proto.Field( + psc_connection_status: "PscConnectionStatus" = proto.Field( proto.ENUM, number=7, - enum='PscConnectionStatus', + enum="PscConnectionStatus", ) - connection_type: 'ConnectionType' = proto.Field( + connection_type: "ConnectionType" = proto.Field( proto.ENUM, number=8, - enum='ConnectionType', + enum="ConnectionType", ) @@ -669,6 +668,7 @@ class PersistenceConfig(proto.Message): Optional. AOF configuration. This field will be ignored if mode is not AOF. """ + class PersistenceMode(proto.Enum): r"""Possible persistence modes. @@ -700,6 +700,7 @@ class RDBConfig(proto.Message): snapshots will be aligned. If not provided, the current time will be used. """ + class SnapshotPeriod(proto.Enum): r"""Possible snapshot periods. @@ -721,10 +722,10 @@ class SnapshotPeriod(proto.Enum): TWELVE_HOURS = 3 TWENTY_FOUR_HOURS = 4 - rdb_snapshot_period: 'PersistenceConfig.RDBConfig.SnapshotPeriod' = proto.Field( + rdb_snapshot_period: "PersistenceConfig.RDBConfig.SnapshotPeriod" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.RDBConfig.SnapshotPeriod', + enum="PersistenceConfig.RDBConfig.SnapshotPeriod", ) rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -739,6 +740,7 @@ class AOFConfig(proto.Message): append_fsync (google.cloud.memorystore_v1beta.types.PersistenceConfig.AOFConfig.AppendFsync): Optional. The fsync mode. """ + class AppendFsync(proto.Enum): r"""Possible fsync modes. @@ -762,10 +764,10 @@ class AppendFsync(proto.Enum): EVERY_SEC = 2 ALWAYS = 3 - append_fsync: 'PersistenceConfig.AOFConfig.AppendFsync' = proto.Field( + append_fsync: "PersistenceConfig.AOFConfig.AppendFsync" = proto.Field( proto.ENUM, number=1, - enum='PersistenceConfig.AOFConfig.AppendFsync', + enum="PersistenceConfig.AOFConfig.AppendFsync", ) mode: PersistenceMode = proto.Field( @@ -811,6 +813,7 @@ class ZoneDistributionConfig(proto.Message): Optional. Current zone distribution mode. Defaults to MULTI_ZONE. """ + class ZoneDistributionMode(proto.Enum): r"""Possible zone distribution modes. 
@@ -904,10 +907,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -986,10 +989,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1034,10 +1037,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) request_id: str = proto.Field( proto.STRING, @@ -1142,16 +1145,18 @@ class CertChain(proto.Message): number=1, ) - ca_certs: MutableSequence['CertificateAuthority.ManagedCertificateAuthority.CertChain'] = proto.RepeatedField( + ca_certs: MutableSequence[ + "CertificateAuthority.ManagedCertificateAuthority.CertChain" + ] = proto.RepeatedField( proto.MESSAGE, number=1, - message='CertificateAuthority.ManagedCertificateAuthority.CertChain', + message="CertificateAuthority.ManagedCertificateAuthority.CertChain", ) managed_server_ca: ManagedCertificateAuthority = proto.Field( proto.MESSAGE, number=2, - oneof='server_ca', + oneof="server_ca", message=ManagedCertificateAuthority, ) name: str = proto.Field( diff --git a/owl-bot-staging/google-cloud-memorystore/v1/mypy.ini b/packages/google-cloud-memorystore/mypy.ini similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/mypy.ini rename to packages/google-cloud-memorystore/mypy.ini diff --git a/packages/google-cloud-memorystore/noxfile.py b/packages/google-cloud-memorystore/noxfile.py new file mode 100644 index 000000000000..a9ceef47133c --- /dev/null +++ b/packages/google-cloud-memorystore/noxfile.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_certificate_authority_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py 
b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_instances_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_create_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_delete_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_certificate_authority_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_get_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py 
b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_list_instances_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py rename to packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1beta_generated_memorystore_update_instance_sync.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json rename to packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json rename to packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json diff --git a/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh b/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-memorystore/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." 
+ exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/scripts/fixup_memorystore_v1_keywords.py rename to packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1beta_keywords.py similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1beta/scripts/fixup_memorystore_v1beta_keywords.py rename to packages/google-cloud-memorystore/scripts/fixup_memorystore_v1beta_keywords.py diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py b/packages/google-cloud-memorystore/setup.py similarity index 93% rename from owl-bot-staging/google-cloud-memorystore/v1beta/setup.py rename to packages/google-cloud-memorystore/setup.py index 3cb03e5a17ca..f85d7df9f3c1 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/setup.py +++ b/packages/google-cloud-memorystore/setup.py @@ -17,20 +17,22 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-memorystore' +name = "google-cloud-memorystore" description = "Google Cloud Memorystore API client library" version = None -with open(os.path.join(package_root, 'google/cloud/memorystore/gapic_version.py')) as fp: +with open( + os.path.join(package_root, "google/cloud/memorystore/gapic_version.py") +) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -47,8 +49,7 @@ "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/google-cloud-memorystore/testing/.gitignore b/packages/google-cloud-memorystore/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-memorystore/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt b/packages/google-cloud-memorystore/testing/constraints-3.10.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.10.txt rename to packages/google-cloud-memorystore/testing/constraints-3.10.txt diff --git 
a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt b/packages/google-cloud-memorystore/testing/constraints-3.11.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.11.txt rename to packages/google-cloud-memorystore/testing/constraints-3.11.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt b/packages/google-cloud-memorystore/testing/constraints-3.12.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.12.txt rename to packages/google-cloud-memorystore/testing/constraints-3.12.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt b/packages/google-cloud-memorystore/testing/constraints-3.13.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.13.txt rename to packages/google-cloud-memorystore/testing/constraints-3.13.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt b/packages/google-cloud-memorystore/testing/constraints-3.7.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.7.txt rename to packages/google-cloud-memorystore/testing/constraints-3.7.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt b/packages/google-cloud-memorystore/testing/constraints-3.8.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.8.txt rename to packages/google-cloud-memorystore/testing/constraints-3.8.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt b/packages/google-cloud-memorystore/testing/constraints-3.9.txt similarity index 100% rename from owl-bot-staging/google-cloud-memorystore/v1/testing/constraints-3.9.txt rename to packages/google-cloud-memorystore/testing/constraints-3.9.txt diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-memorystore/tests/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py rename to packages/google-cloud-memorystore/tests/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-memorystore/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py b/packages/google-cloud-memorystore/tests/unit/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py rename to packages/google-cloud-memorystore/tests/unit/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/__init__.py +++ b/packages/google-cloud-memorystore/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py rename to packages/google-cloud-memorystore/tests/unit/gapic/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/__init__.py +++ 
b/packages/google-cloud-memorystore/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py similarity index 99% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py index 7b3de3117f38..8f6cf068242c 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/__init__.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2024 Google LLC # diff --git a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py similarity index 64% rename from owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py index 75206a8bb252..52921e82fffd 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1/tests/unit/gapic/memorystore_v1/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,51 +22,56 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.memorystore_v1.services.memorystore import 
MemorystoreClient -from google.cloud.memorystore_v1.services.memorystore import pagers -from google.cloud.memorystore_v1.services.memorystore import transports -from google.cloud.memorystore_v1.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth + +from google.cloud.memorystore_v1.services.memorystore import ( + MemorystoreClient, + pagers, + transports, +) +from google.cloud.memorystore_v1.types import memorystore async def mock_async_gen(data, chunk_size=1): @@ -73,9 +79,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -83,17 +91,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -104,12 +122,24 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert MemorystoreClient._get_default_mtls_endpoint(None) is None - assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + def test__read_environment_variables(): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -119,16 +149,25 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert MemorystoreClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MemorystoreClient._read_environment_variables() == (False, "always", None) + assert MemorystoreClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -136,65 +175,149 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + assert 
MemorystoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert MemorystoreClient._get_client_cert_source(None, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MemorystoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MemorystoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + MemorystoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + 
) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + assert ( + MemorystoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MemorystoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MemorystoreClient._get_universe_domain(None, None) + == MemorystoreClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: MemorystoreClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -202,48 +325,64 @@ def test_memorystore_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - 
else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) @@ -258,27 +397,34 @@ def test_memorystore_client_get_transport_class(): assert transport == transports.MemorystoreRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) -def test_memorystore_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) +def test_memorystore_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -296,13 +442,15 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -314,7 +462,7 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -334,23 +482,33 @@ def test_memorystore_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -359,43 +517,63 @@ def test_memorystore_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), -]) -@mock.patch.object(MemorystoreClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_memorystore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -414,12 +592,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -440,15 +628,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -458,18 +653,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient) +) def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -477,8 +676,12 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -496,16 +699,28 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -515,33 +730,55 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test_memorystore_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -564,11 +801,19 @@ def test_memorystore_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists 
else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -576,25 +821,34 @@ def test_memorystore_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -603,22 +857,28 @@ def test_memorystore_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), -]) -def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), + ], +) +def test_memorystore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +906,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -662,57 +924,69 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=memorystore.ListInstancesRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = memorystore.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -723,23 +997,33 @@ def test_list_instances_rest_required_fields(request_type=memorystore.ListInstan return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -749,16 +1033,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -768,7 +1052,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_instances(**mock_args) @@ -777,10 +1061,13 @@ def test_list_instances_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -791,20 +1078,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( memorystore.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( memorystore.ListInstancesResponse( @@ -813,17 +1100,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): memorystore.Instance(), memorystore.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), memorystore.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), memorystore.ListInstancesResponse( instances=[ memorystore.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), memorystore.ListInstancesResponse( instances=[ @@ -839,21 +1126,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, memorystore.Instance) - for i in results) + assert all(isinstance(i, memorystore.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -875,7 +1161,9 @@ def test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -898,48 +1186,51 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -950,23 +1241,23 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -976,16 +1267,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -995,7 +1288,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_instance(**mock_args) @@ -1004,10 +1297,13 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1018,7 +1314,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( memorystore.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1040,7 +1336,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -1060,7 +1358,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=memorystore.CreateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} @@ -1068,65 +1368,73 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -1136,17 +1444,33 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns "instanceId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -1156,18 +1480,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) mock_args.update(sample_request) @@ -1175,7 +1499,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_instance(**mock_args) @@ -1184,10 +1508,13 @@ def test_create_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1198,9 +1525,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( memorystore.CreateInstanceRequest(), - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) @@ -1222,7 +1549,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -1242,77 +1571,95 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=memorystore.UpdateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", )) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("instance",)) + ) def test_update_instance_rest_flattened(): @@ -1322,17 +1669,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -1340,7 +1689,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.update_instance(**mock_args) @@ -1349,10 +1698,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1363,8 +1716,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( memorystore.UpdateInstanceRequest(), - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1386,7 +1739,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -1406,57 +1761,62 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=memorystore.DeleteInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1464,23 +1824,23 @@ def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -1490,16 +1850,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1507,7 +1869,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_instance(**mock_args) @@ -1516,10 +1878,13 @@ def test_delete_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1530,7 +1895,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( memorystore.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1548,12 +1913,19 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_certificate_authority in client._transport._wrapped_methods + assert ( + client._transport.get_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_certificate_authority + ] = mock_rpc request = {} client.get_certificate_authority(request) @@ -1568,55 +1940,60 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_required_fields( + request_type=memorystore.GetCertificateAuthorityRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1627,23 +2004,23 @@ def test_get_certificate_authority_rest_required_fields(request_type=memorystore return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_certificate_authority_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_certificate_authority_rest_flattened(): @@ -1653,16 +2030,18 @@ def test_get_certificate_authority_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1672,7 +2051,7 @@ def test_get_certificate_authority_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_certificate_authority(**mock_args) @@ -1681,10 +2060,14 @@ def test_get_certificate_authority_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}/certificateAuthority" + % client.transport._host, + args[1], + ) -def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): +def test_get_certificate_authority_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1695,7 +2078,7 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_certificate_authority( memorystore.GetCertificateAuthorityRequest(), - name='name_value', + name="name_value", ) @@ -1737,8 +2120,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = MemorystoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -1761,16 +2143,20 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.MemorystoreRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MemorystoreRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_rest(): transport = MemorystoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -1780,18 +2166,19 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1799,26 +2186,28 @@ def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - memorystore.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -1828,31 +2217,40 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_instances" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + pb_message = memorystore.ListInstancesRequest.pb( + memorystore.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1862,18 +2260,26 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + return_value = memorystore.ListInstancesResponse.to_json( + memorystore.ListInstancesResponse() + ) req.return_value.content = return_value request = memorystore.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -1881,18 +2287,19 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1900,35 +2307,37 @@ def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance( - name='name_value', - state=memorystore.Instance.State.CREATING, - uid='uid_value', - replica_count=1384, - authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, - transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, - shard_count=1178, - node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, - engine_version='engine_version_value', - deletion_protection_enabled=True, - mode=memorystore.Instance.Mode.STANDALONE, + name="name_value", + state=memorystore.Instance.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version="engine_version_value", + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, ) # Wrap the value into a proper Response obj @@ -1938,21 +2347,27 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, memorystore.Instance) - assert response.name == 'name_value' + assert response.name == "name_value" assert response.state == memorystore.Instance.State.CREATING - assert response.uid == 'uid_value' + assert response.uid == "uid_value" assert response.replica_count == 1384 - assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED - assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert ( + response.authorization_mode + == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + ) + assert ( + response.transit_encryption_mode + == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + ) assert response.shard_count == 1178 assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO - assert response.engine_version == 'engine_version_value' + assert response.engine_version == "engine_version_value" assert response.deletion_protection_enabled is True assert response.mode == memorystore.Instance.Mode.STANDALONE @@ -1961,14 +2376,21 @@ def test_get_instance_rest_call_success(request_type): def test_get_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) @@ -1985,33 +2407,42 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.Instance() - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + request_type=memorystore.CreateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2019,19 +2450,82 @@ def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + 
"deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2051,7 +2545,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2065,7 +2559,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2080,12 +2574,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2098,15 +2596,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -2118,18 +2616,28 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_create_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + pb_message = memorystore.CreateInstanceRequest.pb( + memorystore.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2143,33 +2651,44 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=memorystore.UpdateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2177,19 +2696,84 @@ def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", 
+ "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2209,7 +2793,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2223,7 +2807,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2238,12 +2822,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2256,15 +2844,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) @@ -2276,18 +2864,28 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_update_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + pb_message = memorystore.UpdateInstanceRequest.pb( + memorystore.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2301,33 +2899,42 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=memorystore.DeleteInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2335,30 +2942,32 @@ def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) @@ -2370,18 +2979,28 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_delete_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + pb_message = memorystore.DeleteInstanceRequest.pb( + memorystore.DeleteInstanceRequest() + ) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2395,33 +3014,42 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_bad_request( + request_type=memorystore.GetCertificateAuthorityRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2429,25 +3057,27 @@ def test_get_certificate_authority_rest_bad_request(request_type=memorystore.Get client.get_certificate_authority(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetCertificateAuthorityRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetCertificateAuthorityRequest, + dict, + ], +) def test_get_certificate_authority_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = memorystore.CertificateAuthority( - name='name_value', + name="name_value", ) # Wrap the value into a proper Response obj @@ -2457,30 +3087,39 @@ def test_get_certificate_authority_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) # Establish that the response is the type that we expect. assert isinstance(response, memorystore.CertificateAuthority) - assert response.name == 'name_value' + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_certificate_authority_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + pb_message = memorystore.GetCertificateAuthorityRequest.pb( + memorystore.GetCertificateAuthorityRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2490,18 +3129,26 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + return_value = memorystore.CertificateAuthority.to_json( + memorystore.CertificateAuthority() + ) req.return_value.content = return_value request = memorystore.GetCertificateAuthorityRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() - client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_certificate_authority( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -2513,13 +3160,17 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2527,20 +3178,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.Location() @@ -2548,7 +3202,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2558,19 +3212,23 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2578,20 +3236,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -2599,7 +3260,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2609,19 +3270,25 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2629,28 +3296,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2660,19 +3330,25 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2680,28 +3356,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2711,19 +3390,25 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2731,20 +3416,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -2752,7 +3440,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2762,19 +3450,25 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2782,20 +3476,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -2803,7 +3500,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2812,10 +3509,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -2829,9 +3526,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -2851,9 +3546,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -2873,9 +3566,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -2895,9 +3586,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -2917,9 +3606,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -2940,8 +3627,8 @@ def test_get_certificate_authority_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_certificate_authority), - '__call__') as call: + type(client.transport.get_certificate_authority), "__call__" + ) as call: client.get_certificate_authority(request=None) # Establish that the underlying stub method was called. @@ -2962,7 +3649,7 @@ def test_memorystore_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. @@ -2974,13 +3661,15 @@ def test_memorystore_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_memorystore_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + with mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2989,18 +3678,18 @@ def test_memorystore_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'get_certificate_authority', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "get_certificate_authority", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3016,7 +3705,7 @@ def test_memorystore_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3025,25 +3714,30 @@ def test_memorystore_base_transport(): def test_memorystore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_memorystore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.memorystore_v1.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport() @@ -3052,61 +3746,75 @@ def test_memorystore_base_transport_with_adc(): def test_memorystore_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) MemorystoreClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_memorystore_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MemorystoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MemorystoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_no_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_with_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com:8000' + "memorystore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -3137,11 +3845,16 @@ def test_memorystore_client_transport_session_collision(transport_name): session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + def test_certificate_authority_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + expected = 
"projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.certificate_authority_path(project, location, instance) assert expected == actual @@ -3158,11 +3871,18 @@ def test_parse_certificate_authority_path(): actual = MemorystoreClient.parse_certificate_authority_path(path) assert expected == actual + def test_forwarding_rule_path(): project = "cuttlefish" region = "mussel" forwarding_rule = "winkle" - expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + expected = ( + "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) + ) actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) assert expected == actual @@ -3179,11 +3899,16 @@ def test_parse_forwarding_rule_path(): actual = MemorystoreClient.parse_forwarding_rule_path(path) assert expected == actual + def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.instance_path(project, location, instance) assert expected == actual @@ -3200,10 +3925,14 @@ def test_parse_instance_path(): actual = MemorystoreClient.parse_instance_path(path) assert expected == actual + def test_network_path(): project = "cuttlefish" network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = MemorystoreClient.network_path(project, network) assert expected == actual @@ -3219,12 +3948,19 @@ def test_parse_network_path(): actual = MemorystoreClient.parse_network_path(path) assert expected == actual + def test_service_attachment_path(): project = "scallop" region = "abalone" service_attachment = "squid" - expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) - actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = MemorystoreClient.service_attachment_path( + project, region, service_attachment + ) assert expected == actual @@ -3240,9 +3976,12 @@ def test_parse_service_attachment_path(): actual = MemorystoreClient.parse_service_attachment_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = MemorystoreClient.common_billing_account_path(billing_account) assert expected == actual @@ -3257,9 +3996,12 @@ def test_parse_common_billing_account_path(): actual = MemorystoreClient.parse_common_billing_account_path(path) assert expected 
== actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MemorystoreClient.common_folder_path(folder) assert expected == actual @@ -3274,9 +4016,12 @@ def test_parse_common_folder_path(): actual = MemorystoreClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MemorystoreClient.common_organization_path(organization) assert expected == actual @@ -3291,9 +4036,12 @@ def test_parse_common_organization_path(): actual = MemorystoreClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = MemorystoreClient.common_project_path(project) assert expected == actual @@ -3308,10 +4056,14 @@ def test_parse_common_project_path(): actual = MemorystoreClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = MemorystoreClient.common_location_path(project, location) assert expected == actual @@ -3331,14 +4083,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: transport_class = MemorystoreClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3349,10 +4105,11 @@ def test_client_with_default_client_info(): def test_transport_close_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -3360,12 +4117,11 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -3374,9 +4130,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (MemorystoreClient, transports.MemorystoreRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MemorystoreClient, transports.MemorystoreRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -3391,7 +4151,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py similarity index 64% rename from owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py rename to packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py index 25ba80cdffb7..c0873f072812 100644 --- a/owl-bot-staging/google-cloud-memorystore/v1beta/tests/unit/gapic/memorystore_v1beta/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,51 +22,56 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format +from collections.abc import AsyncIterable, Iterable import json import math -import pytest + from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +import grpc +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 -from google.cloud.memorystore_v1beta.services.memorystore import MemorystoreClient -from google.cloud.memorystore_v1beta.services.memorystore import pagers -from google.cloud.memorystore_v1beta.services.memorystore import transports -from google.cloud.memorystore_v1beta.types import memorystore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth + +from google.cloud.memorystore_v1beta.services.memorystore import ( + MemorystoreClient, + pagers, + transports, +) +from google.cloud.memorystore_v1beta.types import memorystore async def mock_async_gen(data, chunk_size=1): @@ -73,9 +79,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -83,17 +91,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -104,12 +122,24 @@ def test__get_default_mtls_endpoint(): non_googleapi = "api.example.com" assert MemorystoreClient._get_default_mtls_endpoint(None) is None - assert MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MemorystoreClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert MemorystoreClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + def test__read_environment_variables(): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -119,16 +149,25 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert MemorystoreClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MemorystoreClient._read_environment_variables() == (False, "always", None) + assert MemorystoreClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert MemorystoreClient._read_environment_variables() == (False, "auto", None) @@ -136,65 +175,149 @@ def test__read_environment_variables(): with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MemorystoreClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MemorystoreClient._read_environment_variables() == (False, "auto", "foo.com") + assert MemorystoreClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert MemorystoreClient._get_client_cert_source(None, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MemorystoreClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MemorystoreClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MemorystoreClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MemorystoreClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MemorystoreClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert MemorystoreClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") == 
MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MemorystoreClient.DEFAULT_MTLS_ENDPOINT - assert MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + MemorystoreClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "always") + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MemorystoreClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MemorystoreClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - MemorystoreClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + MemorystoreClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert MemorystoreClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MemorystoreClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MemorystoreClient._get_universe_domain(None, None) == MemorystoreClient._DEFAULT_UNIVERSE + assert ( + MemorystoreClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MemorystoreClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MemorystoreClient._get_universe_domain(None, None) + == MemorystoreClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: MemorystoreClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -202,48 +325,64 @@ def test_memorystore_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (MemorystoreClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MemorystoreClient, "rest"), + ], +) def test_memorystore_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - 
else - 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) @@ -258,27 +397,34 @@ def test_memorystore_client_get_transport_class(): assert transport == transports.MemorystoreRestTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) -def test_memorystore_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) +def test_memorystore_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MemorystoreClient, 'get_transport_class') as gtc: + with mock.patch.object(MemorystoreClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -296,13 +442,15 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -314,7 +462,7 @@ def test_memorystore_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -334,23 +482,33 @@ def test_memorystore_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -359,43 +517,63 @@ def test_memorystore_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), -]) -@mock.patch.object(MemorystoreClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "true"), + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_memorystore_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_memorystore_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -414,12 +592,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -440,15 +628,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -458,18 +653,22 @@ def test_memorystore_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MemorystoreClient) +) def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -477,8 +676,12 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None @@ -496,16 +699,28 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -515,33 +730,55 @@ def test_memorystore_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): with pytest.raises(ValueError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + -@pytest.mark.parametrize("client_class", [ - MemorystoreClient -]) -@mock.patch.object(MemorystoreClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MemorystoreClient)) +@pytest.mark.parametrize("client_class", [MemorystoreClient]) +@mock.patch.object( + MemorystoreClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MemorystoreClient), +) def test_memorystore_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = MemorystoreClient._DEFAULT_UNIVERSE - default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MemorystoreClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -564,11 +801,19 @@ def test_memorystore_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists 
else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -576,25 +821,34 @@ def test_memorystore_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), -]) -def test_memorystore_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest"), + ], +) +def test_memorystore_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -603,22 +857,28 @@ def test_memorystore_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), -]) -def test_memorystore_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MemorystoreClient, transports.MemorystoreRestTransport, "rest", None), + ], +) +def test_memorystore_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +906,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -662,57 +924,69 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=memorystore.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=memorystore.ListInstancesRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = memorystore.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -723,23 +997,33 @@ def test_list_instances_rest_required_fields(request_type=memorystore.ListInstan return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -749,16 +1033,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -768,7 +1052,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.list_instances(**mock_args) @@ -777,10 +1061,14 @@ def test_list_instances_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -791,20 +1079,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( memorystore.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( memorystore.ListInstancesResponse( @@ -813,17 +1101,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): memorystore.Instance(), memorystore.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), memorystore.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), memorystore.ListInstancesResponse( instances=[ memorystore.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), memorystore.ListInstancesResponse( instances=[ @@ -839,21 +1127,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(memorystore.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, memorystore.Instance) - for i in results) + assert all(isinstance(i, memorystore.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -875,7 +1162,9 @@ def test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -898,48 +1187,51 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -950,23 +1242,23 @@ def test_get_instance_rest_required_fields(request_type=memorystore.GetInstanceR return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -976,16 +1268,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -995,7 +1289,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_instance(**mock_args) @@ -1004,10 +1298,14 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1018,7 +1316,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( memorystore.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1040,7 +1338,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -1060,7 +1360,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=memorystore.CreateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} @@ -1068,65 +1370,73 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("instance_id", "request_id", )) + assert not set(unset_fields) - set( + ( + "instance_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -1136,17 +1446,33 @@ def test_create_instance_rest_required_fields(request_type=memorystore.CreateIns "instanceId", "", ), - ('$alt', 'json;enum-encoding=int') + ("$alt", "json;enum-encoding=int"), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", "requestId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set( + ( + "instanceId", + "requestId", + ) + ) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -1156,18 +1482,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) mock_args.update(sample_request) @@ -1175,7 +1501,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.create_instance(**mock_args) @@ -1184,10 +1510,14 @@ def test_create_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*}/instances" + % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1198,9 +1528,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( memorystore.CreateInstanceRequest(), - parent='parent_value', - instance=memorystore.Instance(name='name_value'), - instance_id='instance_id_value', + parent="parent_value", + instance=memorystore.Instance(name="name_value"), + instance_id="instance_id_value", ) @@ -1222,7 +1552,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -1242,77 +1574,95 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=memorystore.UpdateInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", )) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("instance", ))) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("instance",)) + ) def test_update_instance_rest_flattened(): @@ -1322,17 +1672,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -1340,7 +1692,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.update_instance(**mock_args) @@ -1349,10 +1701,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1363,8 +1719,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( memorystore.UpdateInstanceRequest(), - instance=memorystore.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=memorystore.Instance(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -1386,7 +1742,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -1406,57 +1764,62 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=memorystore.DeleteInstanceRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
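# The flattened-call tests close by validating the URI that was actually hit
# against the method's http rule via google.api_core.path_template.validate.  A
# simplified stand-in for that check using a plain regex; the host below is an
# assumption, purely for illustration:
import re

_RULE = re.compile(
    r"^https://memorystore\.example\.com/v1beta/"
    r"projects/[^/]+/locations/[^/]+/instances/[^/]+$"
)

def satisfies_rule(uri: str) -> bool:
    # stands in for path_template.validate(template % host, uri)
    return _RULE.match(uri) is not None

assert satisfies_rule(
    "https://memorystore.example.com/v1beta/"
    "projects/sample1/locations/sample2/instances/sample3"
)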
- assert not set(unset_fields) - set(("request_id", )) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1464,23 +1827,23 @@ def test_delete_instance_rest_required_fields(request_type=memorystore.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -1490,16 +1853,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1507,7 +1872,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.delete_instance(**mock_args) @@ -1516,10 +1881,14 @@ def test_delete_instance_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1530,7 +1899,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( memorystore.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1548,12 +1917,19 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_certificate_authority in client._transport._wrapped_methods + assert ( + client._transport.get_certificate_authority + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_certificate_authority] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_certificate_authority + ] = mock_rpc request = {} client.get_certificate_authority(request) @@ -1568,55 +1944,60 @@ def test_get_certificate_authority_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_certificate_authority_rest_required_fields(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_required_fields( + request_type=memorystore.GetCertificateAuthorityRequest, +): transport_class = transports.MemorystoreRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_certificate_authority._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_certificate_authority._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -1627,23 +2008,23 @@ def test_get_certificate_authority_rest_required_fields(request_type=memorystore return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_certificate_authority_rest_unset_required_fields(): - transport = transports.MemorystoreRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_certificate_authority._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_certificate_authority_rest_flattened(): @@ -1653,16 +2034,18 @@ def test_get_certificate_authority_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.CertificateAuthority() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -1672,7 +2055,7 @@ def test_get_certificate_authority_rest_flattened(): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value client.get_certificate_authority(**mock_args) @@ -1681,10 +2064,14 @@ def test_get_certificate_authority_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority" + % client.transport._host, + args[1], + ) -def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest'): +def test_get_certificate_authority_rest_flattened_error(transport: str = "rest"): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1695,7 +2082,7 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_certificate_authority( memorystore.GetCertificateAuthorityRequest(), - name='name_value', + name="name_value", ) @@ -1737,8 +2124,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = MemorystoreClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -1761,16 +2147,20 @@ def test_transport_instance(): assert client.transport is transport -@pytest.mark.parametrize("transport_class", [ - transports.MemorystoreRestTransport, -]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MemorystoreRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_rest(): transport = MemorystoreClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -1780,18 +2170,19 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1799,26 +2190,28 @@ def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - memorystore.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -1828,31 +2221,40 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) # Establish that the response is the type that we expect. 
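# The *_rest_bad_request tests above fake a 400 response on the session and assert
# the client surfaces it as an exception.  A generic sketch of the same shape with
# a toy error type and a hypothetical call_api() helper:
from unittest import mock
import pytest

class BadRequest(Exception):
    pass

def call_api(session):
    resp = session.request("GET", "v1/sample_method")
    if resp.status_code == 400:
        raise BadRequest("bad request")
    return resp

session = mock.Mock()
session.request.return_value = mock.Mock(status_code=400, json=mock.Mock(return_value={}))

with pytest.raises(BadRequest):
    call_api(session)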
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_list_instances") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_instances" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.ListInstancesRequest.pb(memorystore.ListInstancesRequest()) + pb_message = memorystore.ListInstancesRequest.pb( + memorystore.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1862,18 +2264,26 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.ListInstancesResponse.to_json(memorystore.ListInstancesResponse()) + return_value = memorystore.ListInstancesResponse.to_json( + memorystore.ListInstancesResponse() + ) req.return_value.content = return_value request = memorystore.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -1881,18 +2291,19 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
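# The *_rest_interceptors tests patch a pre- and a post- hook on the interceptor,
# drive one call through, and assert each hook fired exactly once.  A standalone
# sketch of that pattern with a toy interceptor, not MemorystoreRestInterceptor:
from unittest import mock

class ToyInterceptor:
    def pre_call(self, request):
        return request

    def post_call(self, response):
        return response

def call_with_hooks(interceptor, request):
    request = interceptor.pre_call(request)
    response = {"echo": request}  # stands in for the real HTTP round trip
    return interceptor.post_call(response)

with mock.patch.object(ToyInterceptor, "pre_call", side_effect=lambda r: r) as pre, \
     mock.patch.object(ToyInterceptor, "post_call", side_effect=lambda r: r) as post:
    call_with_hooks(ToyInterceptor(), {"name": "n"})
    pre.assert_called_once()
    post.assert_called_once()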
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -1900,35 +2311,37 @@ def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = memorystore.Instance( - name='name_value', - state=memorystore.Instance.State.CREATING, - uid='uid_value', - replica_count=1384, - authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, - transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, - shard_count=1178, - node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, - engine_version='engine_version_value', - deletion_protection_enabled=True, - mode=memorystore.Instance.Mode.STANDALONE, + name="name_value", + state=memorystore.Instance.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version="engine_version_value", + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, ) # Wrap the value into a proper Response obj @@ -1938,21 +2351,27 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_instance(request) # Establish that the response is the type that we expect. 
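# The *_rest_call_success tests stub the transport's requests Session so no real
# HTTP call happens: a Mock response carries a JSON-encoded body, and the client
# decodes it back into the response message.  A minimal sketch of that mechanism
# with a plain fetch_name() function under test instead of the generated client:
import json
from unittest import mock
import requests

def fetch_name(session: requests.Session) -> str:
    resp = session.request("GET", "https://example.invalid/v1/resource")
    return json.loads(resp.content)["name"]

with mock.patch.object(requests.Session, "request") as req:
    fake = mock.Mock()
    fake.status_code = 200
    fake.content = json.dumps({"name": "name_value"}).encode("UTF-8")
    req.return_value = fake

    assert fetch_name(requests.Session()) == "name_value"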
assert isinstance(response, memorystore.Instance) - assert response.name == 'name_value' + assert response.name == "name_value" assert response.state == memorystore.Instance.State.CREATING - assert response.uid == 'uid_value' + assert response.uid == "uid_value" assert response.replica_count == 1384 - assert response.authorization_mode == memorystore.Instance.AuthorizationMode.AUTH_DISABLED - assert response.transit_encryption_mode == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + assert ( + response.authorization_mode + == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + ) + assert ( + response.transit_encryption_mode + == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + ) assert response.shard_count == 1178 assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO - assert response.engine_version == 'engine_version_value' + assert response.engine_version == "engine_version_value" assert response.deletion_protection_enabled is True assert response.mode == memorystore.Instance.Mode.STANDALONE @@ -1961,14 +2380,21 @@ def test_get_instance_rest_call_success(request_type): def test_get_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) @@ -1985,33 +2411,42 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.Instance() - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + request_type=memorystore.CreateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2019,19 +2454,82 @@ def test_create_instance_rest_bad_request(request_type=memorystore.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + 
"deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2051,7 +2549,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2065,7 +2563,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2080,12 +2578,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2098,15 +2600,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instance(request) @@ -2118,18 +2620,28 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_create_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_create_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.CreateInstanceRequest.pb(memorystore.CreateInstanceRequest()) + pb_message = memorystore.CreateInstanceRequest.pb( + memorystore.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2143,33 +2655,44 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=memorystore.UpdateInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2177,19 +2700,84 @@ def test_update_instance_rest_bad_request(request_type=memorystore.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'state': 1, 'state_info': {'update_info': {'target_shard_count': 1920, 'target_replica_count': 2126}}, 'uid': 'uid_value', 'replica_count': 1384, 'authorization_mode': 1, 'transit_encryption_mode': 1, 'shard_count': 1178, 'discovery_endpoints': [{'address': 'address_value', 'port': 453, 'network': 'network_value'}], 'node_type': 1, 'persistence_config': {'mode': 1, 'rdb_config': {'rdb_snapshot_period': 1, 'rdb_snapshot_start_time': {}}, 'aof_config': {'append_fsync': 1}}, 'engine_version': 'engine_version_value', 'engine_configs': {}, 'node_config': {'size_gb': 0.739}, 'zone_distribution_config': {'zone': 'zone_value', 'mode': 1}, 'deletion_protection_enabled': True, 'psc_auto_connections': [{'port': 453, 'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}], 'endpoints': [{'connections': [{'psc_auto_connection': {}, 'psc_connection': {'psc_connection_id': 'psc_connection_id_value', 'ip_address': 'ip_address_value', 'forwarding_rule': 'forwarding_rule_value', 'project_id': 'project_id_value', 'network': 'network_value', 'service_attachment': 'service_attachment_value', 'psc_connection_status': 1, 'connection_type': 1}}]}], 'mode': 1} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", 
+ "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -2209,7 +2797,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -2223,7 +2811,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -2238,12 +2826,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -2256,15 +2848,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
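# The create/update *_rest_call_success tests prune any keys from the big sample
# request dict that the protobuf runtime installed at test time does not know
# about, so the request can still be constructed.  The same idea reduced to plain
# dicts (field names here are illustrative only):
runtime_fields = {"name", "labels", "shard_count"}  # fields the runtime knows
sample = {
    "name": "name_value",
    "labels": {},
    "shard_count": 1178,
    "added_after_generation": True,  # unknown to this runtime
}
for key in [k for k in sample if k not in runtime_fields]:
    del sample[key]

assert "added_after_generation" not in sample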
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_instance(request) @@ -2276,18 +2868,28 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_update_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_update_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.UpdateInstanceRequest.pb(memorystore.UpdateInstanceRequest()) + pb_message = memorystore.UpdateInstanceRequest.pb( + memorystore.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2301,33 +2903,42 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=memorystore.DeleteInstanceRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2335,30 +2946,32 @@ def test_delete_instance_rest_bad_request(request_type=memorystore.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - memorystore.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instance(request) @@ -2370,18 +2983,28 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_delete_instance") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_delete_instance" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.DeleteInstanceRequest.pb(memorystore.DeleteInstanceRequest()) + pb_message = memorystore.DeleteInstanceRequest.pb( + memorystore.DeleteInstanceRequest() + ) 
transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2395,33 +3018,42 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = memorystore.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() -def test_get_certificate_authority_rest_bad_request(request_type=memorystore.GetCertificateAuthorityRequest): +def test_get_certificate_authority_rest_bad_request( + request_type=memorystore.GetCertificateAuthorityRequest, +): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -2429,25 +3061,27 @@ def test_get_certificate_authority_rest_bad_request(request_type=memorystore.Get client.get_certificate_authority(request) -@pytest.mark.parametrize("request_type", [ - memorystore.GetCertificateAuthorityRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetCertificateAuthorityRequest, + dict, + ], +) def test_get_certificate_authority_rest_call_success(request_type): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = memorystore.CertificateAuthority( - name='name_value', + name="name_value", ) # Wrap the value into a proper Response obj @@ -2457,30 +3091,39 @@ def test_get_certificate_authority_rest_call_success(request_type): # Convert return value to protobuf type return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_certificate_authority(request) # Establish that the response is the type that we expect. assert isinstance(response, memorystore.CertificateAuthority) - assert response.name == 'name_value' + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_certificate_authority_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MemorystoreRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) client = MemorystoreClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "post_get_certificate_authority") as post, \ - mock.patch.object(transports.MemorystoreRestInterceptor, "pre_get_certificate_authority") as pre: + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = memorystore.GetCertificateAuthorityRequest.pb(memorystore.GetCertificateAuthorityRequest()) + pb_message = memorystore.GetCertificateAuthorityRequest.pb( + memorystore.GetCertificateAuthorityRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2490,18 +3133,26 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - return_value = memorystore.CertificateAuthority.to_json(memorystore.CertificateAuthority()) + return_value = memorystore.CertificateAuthority.to_json( + memorystore.CertificateAuthority() + ) req.return_value.content = return_value request = memorystore.GetCertificateAuthorityRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() - client.get_certificate_authority(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_certificate_authority( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -2513,13 +3164,17 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2527,20 +3182,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.Location() @@ -2548,7 +3206,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2558,19 +3216,23 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2578,20 +3240,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = locations_pb2.ListLocationsResponse() @@ -2599,7 +3264,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2609,19 +3274,25 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2629,28 +3300,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2660,19 +3334,25 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2680,28 +3360,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2711,19 +3394,25 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2731,20 +3420,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -2752,7 +3444,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2762,19 +3454,25 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -2782,20 +3480,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -2803,7 +3504,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2812,10 +3513,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -2829,9 +3530,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -2851,9 +3550,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -2873,9 +3570,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -2895,9 +3590,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -2917,9 +3610,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -2940,8 +3631,8 @@ def test_get_certificate_authority_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_certificate_authority), - '__call__') as call: + type(client.transport.get_certificate_authority), "__call__" + ) as call: client.get_certificate_authority(request=None) # Establish that the underlying stub method was called. @@ -2962,7 +3653,7 @@ def test_memorystore_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. @@ -2974,13 +3665,15 @@ def test_memorystore_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_memorystore_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport.__init__') as Transport: + with mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MemorystoreTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2989,18 +3682,18 @@ def test_memorystore_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'get_certificate_authority', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "get_certificate_authority", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3016,7 +3709,7 @@ def test_memorystore_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3025,25 +3718,30 @@ def test_memorystore_base_transport(): def test_memorystore_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_memorystore_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.memorystore_v1beta.services.memorystore.transports.MemorystoreTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MemorystoreTransport() @@ -3052,61 +3750,75 @@ def test_memorystore_base_transport_with_adc(): def test_memorystore_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) MemorystoreClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) def test_memorystore_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MemorystoreRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MemorystoreRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_no_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com' + "memorystore.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_host_with_port(transport_name): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='memorystore.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="memorystore.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'memorystore.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://memorystore.googleapis.com:8000' + "memorystore.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://memorystore.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_memorystore_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -3137,11 +3849,16 @@ def test_memorystore_client_transport_session_collision(transport_name): session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + def test_certificate_authority_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format(project=project, location=location, instance=instance, ) + expected = 
"projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.certificate_authority_path(project, location, instance) assert expected == actual @@ -3158,11 +3875,18 @@ def test_parse_certificate_authority_path(): actual = MemorystoreClient.parse_certificate_authority_path(path) assert expected == actual + def test_forwarding_rule_path(): project = "cuttlefish" region = "mussel" forwarding_rule = "winkle" - expected = "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format(project=project, region=region, forwarding_rule=forwarding_rule, ) + expected = ( + "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( + project=project, + region=region, + forwarding_rule=forwarding_rule, + ) + ) actual = MemorystoreClient.forwarding_rule_path(project, region, forwarding_rule) assert expected == actual @@ -3179,11 +3903,16 @@ def test_parse_forwarding_rule_path(): actual = MemorystoreClient.parse_forwarding_rule_path(path) assert expected == actual + def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) actual = MemorystoreClient.instance_path(project, location, instance) assert expected == actual @@ -3200,10 +3929,14 @@ def test_parse_instance_path(): actual = MemorystoreClient.parse_instance_path(path) assert expected == actual + def test_network_path(): project = "cuttlefish" network = "mussel" - expected = "projects/{project}/global/networks/{network}".format(project=project, network=network, ) + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) actual = MemorystoreClient.network_path(project, network) assert expected == actual @@ -3219,12 +3952,19 @@ def test_parse_network_path(): actual = MemorystoreClient.parse_network_path(path) assert expected == actual + def test_service_attachment_path(): project = "scallop" region = "abalone" service_attachment = "squid" - expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format(project=project, region=region, service_attachment=service_attachment, ) - actual = MemorystoreClient.service_attachment_path(project, region, service_attachment) + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = MemorystoreClient.service_attachment_path( + project, region, service_attachment + ) assert expected == actual @@ -3240,9 +3980,12 @@ def test_parse_service_attachment_path(): actual = MemorystoreClient.parse_service_attachment_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = MemorystoreClient.common_billing_account_path(billing_account) assert expected == actual @@ -3257,9 +4000,12 @@ def test_parse_common_billing_account_path(): actual = MemorystoreClient.parse_common_billing_account_path(path) assert expected 
== actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MemorystoreClient.common_folder_path(folder) assert expected == actual @@ -3274,9 +4020,12 @@ def test_parse_common_folder_path(): actual = MemorystoreClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MemorystoreClient.common_organization_path(organization) assert expected == actual @@ -3291,9 +4040,12 @@ def test_parse_common_organization_path(): actual = MemorystoreClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = MemorystoreClient.common_project_path(project) assert expected == actual @@ -3308,10 +4060,14 @@ def test_parse_common_project_path(): actual = MemorystoreClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = MemorystoreClient.common_location_path(project, location) assert expected == actual @@ -3331,14 +4087,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MemorystoreTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MemorystoreTransport, "_prep_wrapped_messages" + ) as prep: transport_class = MemorystoreClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3349,10 +4109,11 @@ def test_client_with_default_client_info(): def test_transport_close_rest(): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -3360,12 +4121,11 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', + "rest", ] for transport in transports: client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -3374,9 +4134,13 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (MemorystoreClient, transports.MemorystoreRestTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MemorystoreClient, transports.MemorystoreRestTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -3391,7 +4155,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From 1c064cd1a71c54b1246fc987e35c2be5cf2df832 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 26 Nov 2024 12:58:26 +0000 Subject: [PATCH 6/6] update README.rst --- .../.repo-metadata.json | 6 +++--- packages/google-cloud-memorystore/README.rst | 20 +++++++++---------- .../google-cloud-memorystore/docs/index.rst | 2 +- .../docs/summary_overview.md | 6 +++--- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-memorystore/.repo-metadata.json b/packages/google-cloud-memorystore/.repo-metadata.json index 4739821ab93b..4a6b46d94de6 100644 --- a/packages/google-cloud-memorystore/.repo-metadata.json +++ b/packages/google-cloud-memorystore/.repo-metadata.json @@ -1,8 +1,8 @@ { "name": "google-cloud-memorystore", - "name_pretty": "", - "api_description": "", - "product_documentation": "", + "name_pretty": "Memorystore", + "api_description": "Memorystore for Valkey is a fully managed Valkey Cluster service for Google Cloud. Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Valkey service without the burden of managing complex Valkey deployments.", + "product_documentation": "https://cloud.google.com/memorystore/docs/valkey", "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", "release_level": "preview", diff --git a/packages/google-cloud-memorystore/README.rst b/packages/google-cloud-memorystore/README.rst index c7ac077d4c69..68a86422046f 100644 --- a/packages/google-cloud-memorystore/README.rst +++ b/packages/google-cloud-memorystore/README.rst @@ -1,9 +1,9 @@ -Python Client for -================== +Python Client for Memorystore +============================= |preview| |pypi| |versions| -``_: +`Memorystore`_: Memorystore for Valkey is a fully managed Valkey Cluster service for Google Cloud. Applications running on Google Cloud can achieve extreme performance by leveraging the highly scalable, available, secure Valkey service without the burden of managing complex Valkey deployments. - `Client Library Documentation`_ - `Product Documentation`_ @@ -14,9 +14,9 @@ Python Client for :target: https://pypi.org/project/google-cloud-memorystore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-memorystore.svg :target: https://pypi.org/project/google-cloud-memorystore/ -.. _: +.. _Memorystore: https://cloud.google.com/memorystore/docs/valkey .. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_overview -.. _Product Documentation: +.. _Product Documentation: https://cloud.google.com/memorystore/docs/valkey Quick Start ----------- @@ -25,12 +25,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. `Enable the .`_ +3. `Enable the Memorystore.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the .: +.. _Enable the Memorystore.: https://cloud.google.com/memorystore/docs/valkey .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation @@ -97,12 +97,12 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for +- Read the `Client Library Documentation`_ for Memorystore to see other available methods on the client. -- Read the ` Product documentation`_ to learn +- Read the `Memorystore Product documentation`_ to learn more about the product and see How-to Guides. - View this `README`_ to see the full list of Cloud APIs that we cover. -.. _ Product documentation: +.. _Memorystore Product documentation: https://cloud.google.com/memorystore/docs/valkey .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-memorystore/docs/index.rst b/packages/google-cloud-memorystore/docs/index.rst index e2ce5cdc7aeb..2c2732bc2cfa 100644 --- a/packages/google-cloud-memorystore/docs/index.rst +++ b/packages/google-cloud-memorystore/docs/index.rst @@ -2,7 +2,7 @@ .. include:: multiprocessing.rst -This package includes clients for multiple versions of . +This package includes clients for multiple versions of Memorystore. By default, you will get version ``memorystore_v1``. diff --git a/packages/google-cloud-memorystore/docs/summary_overview.md b/packages/google-cloud-memorystore/docs/summary_overview.md index 607b7f1693fc..3bdcbee833e6 100644 --- a/packages/google-cloud-memorystore/docs/summary_overview.md +++ b/packages/google-cloud-memorystore/docs/summary_overview.md @@ -5,14 +5,14 @@ reverted. Instead, if you want to place additional content, create an pick up on the content and merge the content. ]: # -# API +# Memorystore API -Overview of the APIs available for API. +Overview of the APIs available for Memorystore API. ## All entries Classes, methods and properties & attributes for - API. +Memorystore API. [classes](https://cloud.google.com/python/docs/reference/google-cloud-memorystore/latest/summary_class.html)
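
The README hunks in the final patch cover project setup, authentication, and installation but stop short of a usage snippet. The sketch below is not part of the patch; it shows one plausible way to call the generated client, assuming the default ``memorystore_v1`` module described in ``docs/index.rst``, Application Default Credentials, and a flattened ``parent`` keyword argument on ``list_instances`` (the project and location values are placeholders, not taken from the patch).

    from google.cloud import memorystore_v1

    # Relies on Application Default Credentials; "rest" is the transport
    # exercised by the tests reformatted in this patch series.
    client = memorystore_v1.MemorystoreClient(transport="rest")

    # Placeholder resource path following the projects/{project}/locations/{location}
    # pattern used by the path helpers in the tests above.
    parent = "projects/my-project/locations/us-central1"

    # list_instances is one of the methods enumerated on the generated transport;
    # the pager is assumed to yield Instance messages with a name field.
    for instance in client.list_instances(parent=parent):
        print(instance.name)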